answer
stringlengths
17
10.2M
package jolie.net; import com.google.gwt.user.client.rpc.SerializationException; import com.google.gwt.user.server.rpc.RPC; import com.google.gwt.user.server.rpc.RPCRequest; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.StringReader; import java.net.URI; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Base64; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import jolie.Interpreter; import jolie.js.JsUtils; import jolie.lang.Constants; import jolie.lang.NativeType; import jolie.net.http.HttpMessage; import jolie.net.http.HttpParser; import jolie.net.http.HttpUtils; import jolie.net.http.Method; import jolie.net.http.MultiPartFormDataParser; import jolie.net.ports.Interface; import jolie.net.protocols.CommProtocol; import jolie.runtime.ByteArray; import jolie.runtime.Value; import jolie.runtime.ValueVector; import jolie.runtime.VariablePath; import jolie.runtime.typing.OneWayTypeDescription; import jolie.runtime.typing.OperationTypeDescription; import jolie.runtime.typing.RequestResponseTypeDescription; import jolie.runtime.typing.Type; 
import jolie.runtime.typing.TypeCastingException; import jolie.util.LocationParser; import jolie.xml.XmlUtils; import joliex.gwt.client.JolieService; import joliex.gwt.server.JolieGWTConverter; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXException;

/**
 * HTTP protocol implementation
 * @author Fabrizio Montesi
 * 14 Nov 2012 - Saverio Giallorenzo - Fabrizio Montesi: support for status codes
 */
public class HttpProtocol extends CommProtocol implements HttpUtils.HttpProtocol {
	// Defaults applied when the protocol configuration does not say otherwise.
	private static final int DEFAULT_STATUS_CODE = 200;
	private static final int DEFAULT_REDIRECTION_STATUS_CODE = 303; // used when only "redirect" is set
	private static final String DEFAULT_CONTENT_TYPE = "application/octet-stream"; // default content type per RFC 2616#7.2.1
	private static final String DEFAULT_FORMAT = "xml";
	// Maps an HTTP status code to its reason phrase (e.g. 200 -> "OK").
	private static final Map< Integer, String > statusCodeDescriptions = new HashMap<>();
	// Redirection status codes for which a Location header is expected (checked in send_appendResponseHeaders).
	private static final Set< Integer > locationRequiredStatusCodes = new HashSet<>();

	static {
		locationRequiredStatusCodes.add( 301 );
		locationRequiredStatusCodes.add( 302 );
		locationRequiredStatusCodes.add( 303 );
		locationRequiredStatusCodes.add( 307 );
		locationRequiredStatusCodes.add( 308 );
	}

	static { // Initialise the HTTP Status code map.
statusCodeDescriptions.put( 100,"Continue" );
		statusCodeDescriptions.put( 101,"Switching Protocols" );
		statusCodeDescriptions.put( 102,"Processing" );
		statusCodeDescriptions.put( 200,"OK" );
		statusCodeDescriptions.put( 201,"Created" );
		statusCodeDescriptions.put( 202,"Accepted" );
		statusCodeDescriptions.put( 203,"Non-Authoritative Information" );
		statusCodeDescriptions.put( 204,"No Content" );
		statusCodeDescriptions.put( 205,"Reset Content" );
		statusCodeDescriptions.put( 206,"Partial Content" );
		statusCodeDescriptions.put( 207,"Multi-Status" );
		statusCodeDescriptions.put( 208,"Already Reported" );
		statusCodeDescriptions.put( 226,"IM Used" );
		statusCodeDescriptions.put( 300,"Multiple Choices" );
		statusCodeDescriptions.put( 301,"Moved Permanently" );
		statusCodeDescriptions.put( 302,"Found" );
		statusCodeDescriptions.put( 303,"See Other" );
		statusCodeDescriptions.put( 304,"Not Modified" );
		statusCodeDescriptions.put( 305,"Use Proxy" );
		statusCodeDescriptions.put( 306,"Reserved" );
		statusCodeDescriptions.put( 307,"Temporary Redirect" );
		statusCodeDescriptions.put( 308,"Permanent Redirect" );
		statusCodeDescriptions.put( 400,"Bad Request" );
		statusCodeDescriptions.put( 401,"Unauthorized" );
		statusCodeDescriptions.put( 402,"Payment Required" );
		statusCodeDescriptions.put( 403,"Forbidden" );
		statusCodeDescriptions.put( 404,"Not Found" );
		statusCodeDescriptions.put( 405,"Method Not Allowed" );
		statusCodeDescriptions.put( 406,"Not Acceptable" );
		statusCodeDescriptions.put( 407,"Proxy Authentication Required" );
		statusCodeDescriptions.put( 408,"Request Timeout" );
		statusCodeDescriptions.put( 409,"Conflict" );
		statusCodeDescriptions.put( 410,"Gone" );
		statusCodeDescriptions.put( 411,"Length Required" );
		statusCodeDescriptions.put( 412,"Precondition Failed" );
		statusCodeDescriptions.put( 413,"Request Entity Too Large" );
		statusCodeDescriptions.put( 414,"Request-URI Too Long" );
		statusCodeDescriptions.put( 415,"Unsupported Media Type" );
		statusCodeDescriptions.put( 416,"Requested Range Not Satisfiable" );
		statusCodeDescriptions.put( 417,"Expectation Failed" );
		statusCodeDescriptions.put( 422,"Unprocessable Entity" );
		statusCodeDescriptions.put( 423,"Locked" );
		statusCodeDescriptions.put( 424,"Failed Dependency" );
		statusCodeDescriptions.put( 426,"Upgrade Required" );
		statusCodeDescriptions.put( 427,"Unassigned" );
		statusCodeDescriptions.put( 428,"Precondition Required" );
		statusCodeDescriptions.put( 429,"Too Many Requests" );
		statusCodeDescriptions.put( 430,"Unassigned" );
		statusCodeDescriptions.put( 431,"Request Header Fields Too Large" );
		statusCodeDescriptions.put( 500,"Internal Server Error" );
		statusCodeDescriptions.put( 501,"Not Implemented" );
		statusCodeDescriptions.put( 502,"Bad Gateway" );
		statusCodeDescriptions.put( 503,"Service Unavailable" );
		statusCodeDescriptions.put( 504,"Gateway Timeout" );
		statusCodeDescriptions.put( 505,"HTTP Version Not Supported" );
		statusCodeDescriptions.put( 507,"Insufficient Storage" );
		statusCodeDescriptions.put( 508,"Loop Detected" );
		statusCodeDescriptions.put( 509,"Unassigned" );
		statusCodeDescriptions.put( 510,"Not Extended" );
		statusCodeDescriptions.put( 511,"Network Authentication Required" );
	}

	/** Names of the protocol configuration parameters recognised by this protocol. */
	private static class Parameters {
		private static final String KEEP_ALIVE = "keepAlive";
		private static final String DEBUG = "debug";
		private static final String COOKIES = "cookies";
		private static final String METHOD = "method";
		private static final String ALIAS = "alias";
		private static final String MULTIPART_HEADERS = "multipartHeaders";
		private static final String CONCURRENT = "concurrent";
		private static final String USER_AGENT = "userAgent";
		private static final String HOST = "host";
		private static final String HEADERS = "headers";
		private static final String ADD_HEADERS = "addHeader";
		private static final String STATUS_CODE = "statusCode";
		private static final String REDIRECT = "redirect";
		private static final String DEFAULT_OPERATION = "default";
		private static final String COMPRESSION = "compression";
		private static final String COMPRESSION_TYPES = "compressionTypes";
		private static final String REQUEST_COMPRESSION = "requestCompression";
		private static final String FORMAT = "format";
		private static final String RESPONSE_HEADER = "responseHeaders";
		private static final String JSON_ENCODING = "json_encoding";
		private static final String REQUEST_USER = "request";
		private static final String RESPONSE_USER = "response";
		private static final String HEADER_USER = "headers";
		private static final String CHARSET = "charset";
		private static final String CONTENT_TYPE = "contentType";
		private static final String CONTENT_TRANSFER_ENCODING = "contentTransferEncoding";
		private static final String CONTENT_DISPOSITION = "contentDisposition";
		private static final String DROP_URI_PATH = "dropURIPath";
		private static final String CACHE_CONTROL = "cacheControl";
		private static final String FORCE_CONTENT_DECODING = "forceContentDecoding";

		/** Sub-parameter names of "multipartHeaders". */
		private static class MultiPartHeaders {
			private static final String FILENAME = "filename";
		}
	}

	/** Custom HTTP header names emitted by this protocol. */
	private static class Headers {
		private static final String JOLIE_MESSAGE_ID = "X-Jolie-MessageID";
	}

	/** MIME content-type constants. */
	private static class ContentTypes {
		private static final String APPLICATION_JSON = "application/json";
	}

	private String inputId = null; // operation name of the last message sent (set in send_internal)
	private final Transformer transformer; // serialises DOM trees for the "xml" format
	private final DocumentBuilderFactory docBuilderFactory;
	private final DocumentBuilder docBuilder;
	private final URI uri; // location this protocol instance talks to / listens at
	private final boolean inInputPort; // true when serving requests (server side)
	private MultiPartFormDataParser multiPartFormDataParser = null;

	@Override
	public String name() {
		return "http";
	}

	@Override
	public boolean isThreadSafe() {
		// Thread safety is opt-in through the "concurrent" protocol parameter.
		return checkBooleanParameter( Parameters.CONCURRENT );
	}

	/**
	 * Creates an HTTP protocol instance bound to {@code uri}.
	 * @throws TransformerConfigurationException if a Transformer cannot be created
	 */
	public HttpProtocol(
		VariablePath configurationPath,
		URI uri,
		boolean inInputPort,
		TransformerFactory transformerFactory,
		DocumentBuilderFactory docBuilderFactory,
		DocumentBuilder docBuilder
	) throws TransformerConfigurationException {
		super( configurationPath );
		this.uri = uri;
		this.inInputPort =
inInputPort;
		this.transformer = transformerFactory.newTransformer();
		this.docBuilderFactory = docBuilderFactory;
		this.docBuilder = docBuilder;
		// XML output is written as a bare fragment: no declaration, no indentation.
		transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" );
		transformer.setOutputProperty( OutputKeys.INDENT, "no" );
	}

	/**
	 * Returns the configured filename for a given part of a multipart message,
	 * or {@code null} when no such header is configured for the operation/part.
	 */
	public String getMultipartHeaderForPart( String operationName, String partName ) {
		if ( hasOperationSpecificParameter( operationName, Parameters.MULTIPART_HEADERS ) ) {
			Value v = getOperationSpecificParameterFirstValue( operationName, Parameters.MULTIPART_HEADERS );
			if ( v.hasChildren( partName ) ) {
				v = v.getFirstChild( partName );
				if ( v.hasChildren( Parameters.MultiPartHeaders.FILENAME ) ) {
					v = v.getFirstChild( Parameters.MultiPartHeaders.FILENAME );
					return v.strValue();
				}
			}
		}
		return null;
	}

	// Fixed boundary used for outgoing multipart/form-data bodies.
	private final static String BOUNDARY = "----jol13h77p77bound4r155";

	/**
	 * Appends a "Cookie" request header built from the "cookies" protocol
	 * parameter: each configured cookie whose domain matches {@code hostname}
	 * is read from the message value and serialised as name=value pairs.
	 */
	private void send_appendCookies( CommMessage message, String hostname, StringBuilder headerBuilder ) {
		Value cookieParam = null;
		if ( hasOperationSpecificParameter( message.operationName(), Parameters.COOKIES ) ) {
			cookieParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.COOKIES );
		} else if ( hasParameter( Parameters.COOKIES ) ) {
			cookieParam = getParameterFirstValue( Parameters.COOKIES );
		}
		if ( cookieParam != null ) {
			Value cookieConfig;
			String domain;
			StringBuilder cookieSB = new StringBuilder();
			for( Entry< String, ValueVector > entry : cookieParam.children().entrySet() ) {
				cookieConfig = entry.getValue().first();
				if ( message.value().hasChildren( cookieConfig.strValue() ) ) {
					// Only send the cookie when its configured domain is empty or a suffix of the host.
					domain = cookieConfig.hasChildren( "domain" ) ? cookieConfig.getFirstChild( "domain" ).strValue() : "";
					if ( domain.isEmpty() || hostname.endsWith( domain ) ) {
						cookieSB
							.append( entry.getKey() )
							.append( '=' )
							.append( message.value().getFirstChild( cookieConfig.strValue() ).strValue() )
							.append( ";" );
					}
				}
			}
			if ( cookieSB.length() > 0 ) {
				headerBuilder
					.append( "Cookie: " )
					.append( cookieSB )
					.append( HttpUtils.CRLF );
			}
		}
	}

	/**
	 * Appends one "Set-Cookie" response header per configured cookie found in
	 * the message value, with expires/domain/path/secure attributes taken from
	 * the cookie's configuration.
	 */
	private void send_appendSetCookieHeader( CommMessage message, StringBuilder headerBuilder ) {
		Value cookieParam = null;
		if ( hasOperationSpecificParameter( message.operationName(), Parameters.COOKIES ) ) {
			cookieParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.COOKIES );
		} else if ( hasParameter( Parameters.COOKIES ) ) {
			cookieParam = getParameterFirstValue( Parameters.COOKIES );
		}
		if ( cookieParam != null ) {
			Value cookieConfig;
			for( Entry< String, ValueVector > entry : cookieParam.children().entrySet() ) {
				cookieConfig = entry.getValue().first();
				if ( message.value().hasChildren( cookieConfig.strValue() ) ) {
					headerBuilder
						.append( "Set-Cookie: " )
						.append( entry.getKey() ).append( '=' )
						.append( message.value().getFirstChild( cookieConfig.strValue() ).strValue() )
						.append( "; expires=" )
						.append( cookieConfig.hasChildren( "expires" ) ? cookieConfig.getFirstChild( "expires" ).strValue() : "" )
						.append( "; domain=" )
						.append( cookieConfig.hasChildren( "domain" ) ? cookieConfig.getFirstChild( "domain" ).strValue() : "" )
						.append( "; path=" )
						.append( cookieConfig.hasChildren( "path" ) ? cookieConfig.getFirstChild( "path" ).strValue() : "" );
					if ( cookieConfig.hasChildren( "secure" ) && cookieConfig.getFirstChild( "secure" ).intValue() > 0 ) {
						headerBuilder.append( "; secure" );
					}
					headerBuilder.append( HttpUtils.CRLF );
				}
			}
		}
	}

	// Mutable per-exchange state: negotiated compression encoding, pending
	// response format, and whether the last received request was HEAD.
	private String encoding = null;
	private String responseFormat = null;
	private boolean headRequest = false;

	/**
	 * Appends a ?key=value&... query string built from the children of
	 * {@code value}, URL-encoding keys and values.
	 * NOTE(review): a trailing '&' is left after the last pair — verify intent.
	 */
	private static void send_appendQuerystring( Value value, StringBuilder headerBuilder ) throws IOException {
		if ( !value.children().isEmpty() ) {
			headerBuilder.append( '?' );
			for( Entry< String, ValueVector > entry : value.children().entrySet() ) {
				for( Value v : entry.getValue() ) {
					headerBuilder
						.append( URLEncoder.encode( entry.getKey(), HttpUtils.URL_DECODER_ENC ) )
						.append( '=' )
						.append( URLEncoder.encode( v.strValue(), HttpUtils.URL_DECODER_ENC ) )
						.append( '&' );
				}
			}
		}
	}

	/** Appends the whole message value as a single URL-encoded JSON document after '?'. */
	private void send_appendJsonQueryString( CommMessage message, StringBuilder headerBuilder ) throws IOException {
		if ( message.value().isDefined() || message.value().hasChildren() ) {
			headerBuilder.append( "?" );
			StringBuilder builder = new StringBuilder();
			JsUtils.valueToJsonString( message.value(), true, getSendType( message ), builder );
			headerBuilder.append( URLEncoder.encode( builder.toString(), HttpUtils.URL_DECODER_ENC ) );
		}
	}

	/**
	 * Substitutes %{key} (URL-encoded) and %!{key} (raw) placeholders in an
	 * alias with values from {@code value}; "$" refers to the value root.
	 * Used keys are removed from the message value afterwards.
	 */
	private static void send_appendParsedAlias( String alias, Value value, StringBuilder headerBuilder ) throws IOException {
		int offset = 0; // running delta between positions in `alias` and in `result`
		List< String > aliasKeys = new ArrayList<>();
		String currStrValue;
		String currKey;
		StringBuilder result = new StringBuilder( alias );
		Matcher m = Pattern.compile( "%(!)?\\{[^\\}]*\\}" ).matcher( alias );
		while( m.find() ) {
			int displacement = 2; // skip "%{"
			if ( m.group( 1 ) == null ) { // ! is missing after %: We have to use URLEncoder
				currKey = alias.substring( m.start() + displacement, m.end() - 1 );
				if ( "$".equals( currKey ) ) {
					currStrValue = URLEncoder.encode( value.strValue(), HttpUtils.URL_DECODER_ENC );
				} else {
					currStrValue = URLEncoder.encode( value.getFirstChild( currKey ).strValue(), HttpUtils.URL_DECODER_ENC );
					aliasKeys.add( currKey );
				}
			} else { // ! is given after %: We have to insert the string raw
				displacement = 3; // skip "%!{"
				currKey = alias.substring( m.start() + displacement, m.end() - 1 );
				if ( "$".equals( currKey ) ) {
					currStrValue = value.strValue();
				} else {
					currStrValue = value.getFirstChild( currKey ).strValue();
					aliasKeys.add( currKey );
				}
			}
			result.replace( m.start() + offset, m.end() + offset, currStrValue );
			displacement++; //considering also }
			offset += currStrValue.length() - displacement - currKey.length();
		}
		// removing used keys
		for( String aliasKey : aliasKeys ) {
			value.children().remove( aliasKey );
		}
		headerBuilder.append( result );
	}

	/**
	 * Chooses the wire format: the stored response format on the server side,
	 * otherwise the "format" parameter, defaulting to "xml".
	 */
	private String send_getFormat() {
		String format = DEFAULT_FORMAT;
		if ( inInputPort && responseFormat != null ) {
			format = responseFormat;
			responseFormat = null; // one-shot: reset after use
		} else if ( hasParameter( Parameters.FORMAT ) ) {
			format = getStringParameter( Parameters.FORMAT );
		}
		return format;
	}

	/** Holder for an encoded message body together with its content metadata. */
	private static class EncodedContent {
		private ByteArray content = null;
		private String contentType = DEFAULT_CONTENT_TYPE;
		private String contentDisposition = "";
	}

	/**
	 * Encodes the message body according to {@code format}: xml, binary, html,
	 * multipart/form-data, x-www-form-urlencoded, text/x-gwt-rpc, json or raw.
	 * Outgoing GET requests get no body.
	 * @throws IOException on encoding/serialisation failures
	 */
	private EncodedContent send_encodeContent( CommMessage message, Method method, String charset, String format ) throws IOException {
		EncodedContent ret = new EncodedContent();
		if ( inInputPort == false && method == Method.GET ) {
			// We are building a GET request
			return ret;
		}
		if ( "xml".equals( format ) ) {
			ret.contentType = "text/xml";
			Document doc = docBuilder.newDocument();
			// Response elements are named "<operation>Response" on the server side.
			Element root = doc.createElement( message.operationName() + (( inInputPort ) ?
"Response" : "") );
			doc.appendChild( root );
			if ( message.isFault() ) {
				// Faults are wrapped in an element named after the fault.
				Element faultElement = doc.createElement( message.fault().faultName() );
				root.appendChild( faultElement );
				XmlUtils.valueToDocument( message.fault().value(), faultElement, doc );
			} else {
				XmlUtils.valueToDocument( message.value(), root, doc );
			}
			Source src = new DOMSource( doc );
			ByteArrayOutputStream tmpStream = new ByteArrayOutputStream();
			Result dest = new StreamResult( tmpStream );
			transformer.setOutputProperty( OutputKeys.ENCODING, charset );
			try {
				transformer.transform( src, dest );
			} catch( TransformerException e ) {
				throw new IOException( e );
			}
			ret.content = new ByteArray( tmpStream.toByteArray() );
		} else if ( "binary".equals( format ) ) {
			ret.contentType = "application/octet-stream";
			ret.content = message.value().byteArrayValue();
		} else if ( "html".equals( format ) ) {
			ret.contentType = "text/html";
			if ( message.isFault() ) {
				// Render the fault as a minimal HTML page.
				StringBuilder builder = new StringBuilder();
				builder.append( "<html><head><title>" );
				builder.append( message.fault().faultName() );
				builder.append( "</title></head><body>" );
				builder.append( message.fault().value().strValue() );
				builder.append( "</body></html>" );
				ret.content = new ByteArray( builder.toString().getBytes( charset ) );
			} else {
				ret.content = new ByteArray( message.value().strValue().getBytes( charset ) );
			}
		} else if ( "multipart/form-data".equals( format ) ) {
			ret.contentType = "multipart/form-data; boundary=" + BOUNDARY;
			ByteArrayOutputStream bStream = new ByteArrayOutputStream();
			StringBuilder builder = new StringBuilder();
			// One part per message-value child; children starting with '@' are skipped.
			for( Entry< String, ValueVector > entry : message.value().children().entrySet() ) {
				if ( !entry.getKey().startsWith( "@" ) ) {
					builder.append( "--" ).append( BOUNDARY ).append( HttpUtils.CRLF );
					builder.append( "Content-Disposition: form-data; name=\"" ).append( entry.getKey() ).append( '\"' );
					boolean isBinary = false;
					// Parts listed under multipartHeaders.partName are sent as binary,
					// optionally with filename and Content-Type attributes.
					if ( hasOperationSpecificParameter( message.operationName(), Parameters.MULTIPART_HEADERS ) ) {
						Value specOpParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.MULTIPART_HEADERS );
						if ( specOpParam.hasChildren( "partName" ) ) {
							ValueVector partNames = specOpParam.getChildren( "partName" );
							for( int p = 0; p < partNames.size(); p++ ) {
								if ( partNames.get( p ).hasChildren( "part" ) ) {
									if ( partNames.get( p ).getFirstChild( "part" ).strValue().equals( entry.getKey() ) ) {
										isBinary = true;
										if ( partNames.get( p ).hasChildren( "filename" ) ) {
											builder.append( "; filename=\"" ).append( partNames.get( p ).getFirstChild( "filename" ).strValue() ).append( "\"" );
										}
										if ( partNames.get( p ).hasChildren( "contentType" ) ) {
											builder.append( HttpUtils.CRLF ).append( "Content-Type:" ).append( partNames.get( p ).getFirstChild( "contentType" ).strValue() );
										}
									}
								}
							}
						}
					}
					builder.append( HttpUtils.CRLF ).append( HttpUtils.CRLF );
					if ( isBinary ) {
						bStream.write( builder.toString().getBytes( charset ) );
						bStream.write( entry.getValue().first().byteArrayValue().getBytes() );
						// NOTE(review): delete(0, length()-1) leaves the builder's last char in place — confirm intent.
						builder.delete( 0, builder.length() - 1 );
						builder.append( HttpUtils.CRLF );
					} else {
						builder.append( entry.getValue().first().strValue() ).append( HttpUtils.CRLF );
					}
				}
			}
			builder.append( "--" + BOUNDARY + "--" );
			bStream.write( builder.toString().getBytes( charset ));
			ret.content = new ByteArray( bStream.toByteArray() );
		} else if ( "x-www-form-urlencoded".equals( format ) ) {
			ret.contentType = "application/x-www-form-urlencoded";
			Iterator< Entry< String, ValueVector > > it = message.value().children().entrySet().iterator();
			StringBuilder builder = new StringBuilder();
			if ( message.isFault() ) {
				// Faults are flattened into faultName=...&data=...
				builder.append( "faultName=" );
				builder.append( URLEncoder.encode( message.fault().faultName(), HttpUtils.URL_DECODER_ENC ) );
				builder.append( "&data=" );
				builder.append( URLEncoder.encode( message.fault().value().strValue(), HttpUtils.URL_DECODER_ENC ) );
			} else {
				Entry< String, ValueVector > entry;
				while( it.hasNext() ) {
					entry = it.next();
					builder.append( URLEncoder.encode( entry.getKey(), HttpUtils.URL_DECODER_ENC ) )
						.append( "=" )
						.append( URLEncoder.encode( entry.getValue().first().strValue(), HttpUtils.URL_DECODER_ENC ) );
					if ( it.hasNext() ) {
						builder.append( '&' );
					}
				}
			}
			ret.content = new ByteArray( builder.toString().getBytes( charset ) );
		} else if ( "text/x-gwt-rpc".equals( format ) ) {
			ret.contentType = "text/x-gwt-rpc";
			try {
				if ( inInputPort ) {
					// It's a response
					if ( message.isFault() ) {
						ret.content = new ByteArray( RPC.encodeResponseForFailure( JolieService.class.getMethods()[0], JolieGWTConverter.jolieToGwtFault( message.fault() ) ).getBytes( charset ) );
					} else {
						joliex.gwt.client.Value v = new joliex.gwt.client.Value();
						JolieGWTConverter.jolieToGwtValue( message.value(), v );
						ret.content = new ByteArray( RPC.encodeResponseForSuccess( JolieService.class.getMethods()[0], v ).getBytes( charset ) );
					}
				} else {
					// It's a request
					throw new IOException( "Sending requests to a GWT server is currently unsupported." );
				}
			} catch( SerializationException e ) {
				throw new IOException( e );
			}
		} else if ( "json".equals( format ) ) {
			ret.contentType = ContentTypes.APPLICATION_JSON;
			StringBuilder jsonStringBuilder = new StringBuilder();
			if ( message.isFault() ) {
				// JSON-RPC style fault object: error.code = -32000, error.message, error.data.
				Value error = message.value().getFirstChild( "error" );
				error.getFirstChild( "code" ).setValue( -32000 );
				error.getFirstChild( "message" ).setValue( message.fault().faultName() );
				error.getChildren( "data" ).set( 0, message.fault().value() );
				JsUtils.faultValueToJsonString( message.value(), getSendType( message ), jsonStringBuilder );
			} else {
				JsUtils.valueToJsonString( message.value(), true, getSendType( message ), jsonStringBuilder );
			}
			ret.content = new ByteArray( jsonStringBuilder.toString().getBytes( charset ) );
		} else if ( "raw".equals( format ) ) {
			ret.contentType = "text/plain";
			if ( message.isFault() ) {
				ret.content = new ByteArray( message.fault().value().strValue().getBytes( charset ) );
			} else {
				ret.content = new ByteArray( message.value().strValue().getBytes( charset ) );
			}
		}
		return ret;
	}

	/** True for redirection status codes that require a Location header. */
	private static boolean isLocationNeeded( int statusCode ) {
		return locationRequiredStatusCodes.contains( statusCode );
	}

	/**
	 * Appends user-defined response headers configured under the
	 * operation-specific and port-level "response.headers" parameters.
	 */
	private void send_appendResponseUserHeader( CommMessage message, StringBuilder headerBuilder ) {
		Value responseHeaderParameters = null;
		if ( hasOperationSpecificParameter(message.operationName(), Parameters.RESPONSE_USER) ) {
			responseHeaderParameters = getOperationSpecificParameterFirstValue(message.operationName(), Parameters.RESPONSE_USER);
			if ( ( responseHeaderParameters != null ) && ( responseHeaderParameters.hasChildren(Parameters.HEADER_USER) ) ) {
				for ( Entry< String, ValueVector > entry : responseHeaderParameters.getFirstChild(Parameters.HEADER_USER).children().entrySet() )
					headerBuilder.append( entry.getKey() ).append(": ").append( entry.getValue().first().strValue() ).append( HttpUtils.CRLF );
			}
		}
		responseHeaderParameters = null;
		if ( hasParameter(Parameters.RESPONSE_USER) ) {
			responseHeaderParameters = getParameterFirstValue(Parameters.RESPONSE_USER);
			if ( ( responseHeaderParameters != null ) && ( responseHeaderParameters.hasChildren(Parameters.HEADER_USER) ) ) {
				for ( Entry< String, ValueVector > entry : responseHeaderParameters.getFirstChild(Parameters.HEADER_USER).children().entrySet() )
					headerBuilder.append( entry.getKey() ).append(": ").append( entry.getValue().first().strValue() ).append( HttpUtils.CRLF );
			}
		}
	}

	/**
	 * Builds the response status line and standard response headers
	 * (Location on redirect, Set-Cookie, Server, Cache-Control).
	 */
	private void send_appendResponseHeaders( CommMessage message, StringBuilder headerBuilder ) {
		int statusCode = DEFAULT_STATUS_CODE;
		String statusDescription = null;
		if( hasParameter( Parameters.STATUS_CODE ) ) {
			statusCode = getIntParameter( Parameters.STATUS_CODE );
			if ( !statusCodeDescriptions.containsKey( statusCode ) ) {
				Interpreter.getInstance().logWarning(
					"HTTP protocol for operation " +
					message.operationName() +
					" is sending a message with status code " +
					statusCode +
					", which is not in the HTTP specifications."
);
				// Fall back to a generic reason phrase for unknown codes.
				statusDescription = "Internal Server Error";
			} else if ( isLocationNeeded( statusCode ) && !hasParameter( Parameters.REDIRECT ) ) {
				// if statusCode is a redirection code, location parameter is needed
				Interpreter.getInstance().logWarning(
					"HTTP protocol for operation " +
					message.operationName() +
					" is sending a message with status code " +
					statusCode +
					", which expects a redirect parameter but the latter is not set." );
			}
		} else if ( hasParameter( Parameters.REDIRECT ) ) {
			// "redirect" alone implies the default redirection code (303 See Other).
			statusCode = DEFAULT_REDIRECTION_STATUS_CODE;
		}
		if ( statusDescription == null ) {
			statusDescription = statusCodeDescriptions.get( statusCode );
		}
		headerBuilder.append( "HTTP/1.1 " + statusCode + " " + statusDescription + HttpUtils.CRLF );
		// if redirect has been set, the redirect location parameter is set
		if ( hasParameter( Parameters.REDIRECT ) ) {
			headerBuilder.append( "Location: " + getStringParameter( Parameters.REDIRECT ) + HttpUtils.CRLF );
		}
		send_appendSetCookieHeader( message, headerBuilder );
		headerBuilder.append( "Server: Jolie" ).append( HttpUtils.CRLF );
		StringBuilder cacheControlHeader = new StringBuilder();
		if ( hasParameter( Parameters.CACHE_CONTROL ) ) {
			Value cacheControl = getParameterFirstValue( Parameters.CACHE_CONTROL );
			if ( cacheControl.hasChildren( "maxAge" ) ) {
				cacheControlHeader.append( "max-age=" ).append( cacheControl.getFirstChild( "maxAge" ).intValue() );
			}
		}
		if ( cacheControlHeader.length() > 0 ) {
			headerBuilder.append( "Cache-Control: " ).append( cacheControlHeader ).append( HttpUtils.CRLF );
		}
	}

	/** Appends the HTTP method token (e.g. GET, POST) to the request line. */
	private static void send_appendRequestMethod( Method method, StringBuilder headerBuilder ) {
		headerBuilder.append( method.id() );
	}

	/**
	 * Appends the request target: the URI path (unless dropped or absent),
	 * then the operation name or its parsed alias, then — for GET — the
	 * query string in the requested format.
	 */
	private void send_appendRequestPath( CommMessage message, Method method, String qsFormat, StringBuilder headerBuilder ) throws IOException {
		String path = uri.getRawPath();
		if ( uri.getScheme().equals( "localsocket" ) || path == null || path.isEmpty() || checkBooleanParameter( Parameters.DROP_URI_PATH, false ) ) {
			headerBuilder.append(
'/' ); } else { if ( path.charAt( 0 ) != '/' ) { headerBuilder.append( '/' ); } headerBuilder.append( path ); } if ( hasOperationSpecificParameter( message.operationName(), Parameters.ALIAS ) ) { String alias = getOperationSpecificStringParameter( message.operationName(), Parameters.ALIAS ); send_appendParsedAlias( alias, message.value(), headerBuilder ); } else { headerBuilder.append( message.operationName() ); } if ( method == Method.GET ) { if ( qsFormat.equals( "json" ) ) { send_appendJsonQueryString( message, headerBuilder ); } else { send_appendQuerystring( message.value(), headerBuilder ); } } } private static void send_appendAuthorizationHeader( CommMessage message, StringBuilder headerBuilder ) { if ( message.value().hasChildren( jolie.lang.Constants.Predefined.HTTP_BASIC_AUTHENTICATION.token().content() ) ) { Value v = message.value().getFirstChild( jolie.lang.Constants.Predefined.HTTP_BASIC_AUTHENTICATION.token().content() ); //String realm = v.getFirstChild( "realm" ).strValue(); String userpass = v.getFirstChild( "userid" ).strValue() + ":" + v.getFirstChild( "password" ).strValue(); Base64.Encoder encoder = Base64.getEncoder(); userpass = encoder.encodeToString( userpass.getBytes() ); headerBuilder.append( "Authorization: Basic " ).append( userpass ).append( HttpUtils.CRLF ); } } private void send_appendRequestUserHeader( CommMessage message, StringBuilder headerBuilder ) { Value responseHeaderParameters = null; if ( hasOperationSpecificParameter(message.operationName(), Parameters.REQUEST_USER ) ) { responseHeaderParameters = getOperationSpecificParameterFirstValue(message.operationName(), Parameters.RESPONSE_USER); if ( ( responseHeaderParameters != null ) && ( responseHeaderParameters.hasChildren(Parameters.HEADER_USER) ) ) { for ( Entry< String, ValueVector > entry : responseHeaderParameters.getFirstChild(Parameters.HEADER_USER).children().entrySet() ) headerBuilder.append( entry.getKey() ).append(": ").append( entry.getValue().first().strValue() 
).append( HttpUtils.CRLF ); } } responseHeaderParameters = null; if ( hasParameter(Parameters.RESPONSE_USER) ){ responseHeaderParameters = getParameterFirstValue(Parameters.REQUEST_USER); if ( ( responseHeaderParameters != null ) && ( responseHeaderParameters.hasChildren(Parameters.HEADER_USER) ) ) { for ( Entry< String, ValueVector > entry : responseHeaderParameters.getFirstChild(Parameters.HEADER_USER).children().entrySet() ) headerBuilder.append( entry.getKey() ).append(": ").append( entry.getValue().first().strValue() ).append( HttpUtils.CRLF ); } } } private void send_appendHeader( StringBuilder headerBuilder ) { Value v = getParameterFirstValue( Parameters.ADD_HEADERS ); if ( v != null ) { if ( v.hasChildren( "header" ) ) { for( Value head : v.getChildren( "header" ) ) { String header = head.strValue() + ": " + head.getFirstChild( "value" ).strValue(); headerBuilder.append( header ).append( HttpUtils.CRLF ); } } } } private Method send_getRequestMethod( CommMessage message ) throws IOException { Method method = hasOperationSpecificParameter( message.operationName(), Parameters.METHOD ) ? Method.fromString( getOperationSpecificStringParameter( message.operationName(), Parameters.METHOD ) ) : hasParameterValue( Parameters.METHOD ) ? 
Method.fromString( getStringParameter( Parameters.METHOD ) ) :
			Method.POST;
		return method;
	}

	/**
	 * Builds the request line and general request headers: Host, cookies,
	 * authorization, Accept-Encoding and user-added headers.
	 */
	private void send_appendRequestHeaders( CommMessage message, Method method, String qsFormat, StringBuilder headerBuilder ) throws IOException {
		send_appendRequestMethod( method, headerBuilder );
		headerBuilder.append( ' ' );
		send_appendRequestPath( message, method, qsFormat, headerBuilder );
		headerBuilder.append( " HTTP/1.1" + HttpUtils.CRLF );
		String host = uri.getHost();
		if ( uri.getScheme().equals( "localsocket" ) ) {
			/* in this case we need to replace the localsocket path with a host, that is the default one localhost */
			host = "localhost";
		}
		headerBuilder.append( "Host: " + host + HttpUtils.CRLF );
		// NOTE(review): cookies use uri.getHost() rather than the possibly-adjusted
		// `host` above — confirm this is intended for the localsocket case.
		send_appendCookies( message, uri.getHost(), headerBuilder );
		send_appendAuthorizationHeader( message, headerBuilder );
		if ( checkBooleanParameter( Parameters.COMPRESSION, true ) ) {
			String requestCompression = getStringParameter( Parameters.REQUEST_COMPRESSION );
			if ( requestCompression.equals( "gzip" ) || requestCompression.equals( "deflate" ) ) {
				encoding = requestCompression;
				headerBuilder.append( "Accept-Encoding: " + encoding + HttpUtils.CRLF );
			} else {
				headerBuilder.append( "Accept-Encoding: gzip, deflate" + HttpUtils.CRLF );
			}
		}
		send_appendHeader( headerBuilder );
	}

	/**
	 * Appends Connection, message-id, Content-Type/-Transfer-Encoding/
	 * -Disposition and Content-Length headers, compressing the body first
	 * when compression is enabled and the content type allows it.
	 */
	private void send_appendGenericHeaders( CommMessage message, EncodedContent encodedContent, String charset, StringBuilder headerBuilder ) throws IOException {
		if ( checkBooleanParameter( Parameters.KEEP_ALIVE, true ) == false || channel().toBeClosed() ) {
			channel().setToBeClosed( true );
			headerBuilder.append( "Connection: close" + HttpUtils.CRLF );
		}
		if ( checkBooleanParameter( Parameters.CONCURRENT, true ) ) {
			// The message id header lets concurrent exchanges be correlated.
			headerBuilder.append( Headers.JOLIE_MESSAGE_ID ).append( ": " ).append( message.id() ).append( HttpUtils.CRLF );
		}
		String contentType = getStringParameter( Parameters.CONTENT_TYPE );
		if ( contentType.length() > 0 ) {
			// The "contentType" parameter overrides the format-derived type.
			encodedContent.contentType = contentType;
		}
		encodedContent.contentType = encodedContent.contentType.toLowerCase();
		headerBuilder.append( "Content-Type: " + encodedContent.contentType );
		if ( charset != null ) {
			headerBuilder.append( "; charset=" + charset.toLowerCase() );
		}
		headerBuilder.append( HttpUtils.CRLF );
		if ( encodedContent.content != null ) {
			String transferEncoding = getStringParameter( Parameters.CONTENT_TRANSFER_ENCODING );
			if ( transferEncoding.length() > 0 ) {
				headerBuilder.append( "Content-Transfer-Encoding: " + transferEncoding + HttpUtils.CRLF );
			}
			String contentDisposition = getStringParameter( Parameters.CONTENT_DISPOSITION );
			if ( contentDisposition.length() > 0 ) {
				encodedContent.contentDisposition = contentDisposition;
				headerBuilder.append( "Content-Disposition: " + encodedContent.contentDisposition + HttpUtils.CRLF );
			}
			boolean compression = encoding != null && checkBooleanParameter( Parameters.COMPRESSION, true );
			// Only compress content types in the configured whitelist ("*" means all).
			String compressionTypes = getStringParameter(
				Parameters.COMPRESSION_TYPES,
				"text/html text/css text/plain text/xml text/x-js text/x-gwt-rpc application/json application/javascript application/x-www-form-urlencoded application/xhtml+xml application/xml"
			).toLowerCase();
			if ( compression && !compressionTypes.equals( "*" ) && !compressionTypes.contains( encodedContent.contentType ) ) {
				compression = false;
			}
			if ( compression ) {
				encodedContent.content = HttpUtils.encode( encoding, encodedContent.content, headerBuilder );
			}
			headerBuilder.append( "Content-Length: " + encodedContent.content.size() + HttpUtils.CRLF );
		} else {
			headerBuilder.append( "Content-Length: 0" + HttpUtils.CRLF );
		}
	}

	/** Logs the outgoing header (and optionally the body) when "debug" is set. */
	private void send_logDebugInfo( CharSequence header, EncodedContent encodedContent, String charset ) throws IOException {
		if ( checkBooleanParameter( Parameters.DEBUG ) ) {
			StringBuilder debugSB = new StringBuilder();
			debugSB.append( "[HTTP debug] Sending:\n" );
			debugSB.append( header );
			if ( getParameterVector( Parameters.DEBUG ).first().getFirstChild( "showContent" ).intValue() > 0
				&& encodedContent.content != null ) {
				debugSB.append( encodedContent.content.toString( charset ) );
			}
			Interpreter.getInstance().logInfo( debugSB.toString() );
		}
	}

	/**
	 * Serialises and writes a complete HTTP message: builds response or
	 * request headers depending on the port direction, encodes the body,
	 * and writes both to {@code ostream}. HEAD requests get headers only.
	 */
	@Override
	public void send_internal( OutputStream ostream, CommMessage message, InputStream istream ) throws IOException {
		Method method = send_getRequestMethod( message );
		String charset = HttpUtils.getCharset( getStringParameter( Parameters.CHARSET, "utf-8" ), null );
		String format = send_getFormat();
		String contentType = null;
		StringBuilder headerBuilder = new StringBuilder();
		if ( inInputPort ) {
			// We're responding to a request
			send_appendResponseHeaders( message, headerBuilder );
			send_appendResponseUserHeader( message, headerBuilder );
			send_appendHeader( headerBuilder );
		} else {
			// We're sending a notification or a solicit
			String qsFormat = "";
			if ( method == Method.GET && getParameterFirstValue( Parameters.METHOD ).hasChildren( "queryFormat" ) ) {
				if ( getParameterFirstValue( Parameters.METHOD ).getFirstChild( "queryFormat" ).strValue().equals( "json" ) ) {
					qsFormat = format = "json";
					contentType = ContentTypes.APPLICATION_JSON;
				}
			}
			send_appendRequestUserHeader( message, headerBuilder );
			send_appendRequestHeaders( message, method, qsFormat, headerBuilder );
		}
		EncodedContent encodedContent = send_encodeContent( message, method, charset, format );
		if ( contentType != null ) {
			encodedContent.contentType = contentType;
		}
		send_appendGenericHeaders( message, encodedContent, charset, headerBuilder );
		headerBuilder.append( HttpUtils.CRLF );
		send_logDebugInfo( headerBuilder, encodedContent, charset );
		inputId = message.operationName();
		ostream.write( headerBuilder.toString().getBytes( HttpUtils.URL_DECODER_ENC ) );
		if ( encodedContent.content != null && !headRequest ) {
			ostream.write( encodedContent.content.getBytes() );
		}
		headRequest = false; // one-shot flag, reset after every send
	}

	@Override
	public void send( OutputStream ostream, CommMessage message, InputStream istream ) throws IOException {
		// Delegates transport concerns (e.g. compression of the stream) to HttpUtils.
		HttpUtils.send( ostream, message, istream, inInputPort, channel(), this );
	}

	private
void parseXML( HttpMessage message, Value value, String charset )
	throws IOException
{
	// Parses the message body as an XML document into value.
	// NOTE(review): docBuilderFactory is configured elsewhere in this class —
	// confirm DTD/external-entity processing is disabled there, otherwise this
	// parse of untrusted input is exposed to XXE.
	try {
		if ( message.size() > 0 ) {
			DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
			InputSource src = new InputSource( new ByteArrayInputStream( message.content() ) );
			src.setEncoding( charset ); // honour the charset negotiated from the HTTP headers
			Document doc = builder.parse( src );
			XmlUtils.documentToValue( doc, value );
		}
	} catch( ParserConfigurationException pce ) {
		throw new IOException( pce );
	} catch( SAXException saxe ) {
		throw new IOException( saxe );
	}
}

// Parses the message body as a JSON document into value.
private static void parseJson( HttpMessage message, Value value, boolean strictEncoding, String charset )
	throws IOException
{
	JsUtils.parseJsonIntoValue( new InputStreamReader( new ByteArrayInputStream( message.content() ), charset ), value, strictEncoding );
}

// Parses an application/x-www-form-urlencoded body ("k=v" pairs joined by
// '&') into value; entries without an '=' are silently skipped.
private static void parseForm( HttpMessage message, Value value, String charset )
	throws IOException
{
	String line = new String( message.content(), charset );
	String[] pair;
	for( String item : line.split( "&" ) ) {
		pair = item.split( "=", 2 );
		if ( pair.length > 1 ) {
			value.getChildren( URLDecoder.decode( pair[0], HttpUtils.URL_DECODER_ENC ) ).first().setValue( URLDecoder.decode( pair[1], HttpUtils.URL_DECODER_ENC ) );
		}
	}
}

// Parses a multipart/form-data body. The parser instance is retained in a
// field so the per-part headers can be inspected later by
// recv_checkForMultiPartHeaders, which also resets it.
private void parseMultiPartFormData( HttpMessage message, Value value, String charset )
	throws IOException
{
	multiPartFormDataParser = new MultiPartFormDataParser( message, value );
	multiPartFormDataParser.parse();
}

// Decodes a GWT-RPC request body into value.
// @return the operation name carried inside the GWT-RPC payload
private static String parseGWTRPC( HttpMessage message, Value value, String charset )
	throws IOException
{
	RPCRequest request = RPC.decodeRequest( new String( message.content(), charset ) );
	String operationName = (String)request.getParameters()[0];
	joliex.gwt.client.Value requestValue = (joliex.gwt.client.Value)request.getParameters()[1];
	JolieGWTConverter.gwtToJolieValue( requestValue, value );
	return operationName;
}

// Copies Set-Cookie values of a received response into value, following the
// alias/type mapping configured in the "cookies" protocol parameter.
private void recv_checkForSetCookie( HttpMessage message, Value value )
	throws IOException
{
	if ( hasParameter( Parameters.COOKIES ) ) {
		String
type;
		Value cookies = getParameterFirstValue( Parameters.COOKIES );
		Value cookieConfig;
		Value v;
		for( HttpMessage.Cookie cookie : message.setCookies() ) {
			if ( cookies.hasChildren( cookie.name() ) ) {
				cookieConfig = cookies.getFirstChild( cookie.name() );
				if ( cookieConfig.isString() ) {
					// the parameter value is the alias under which the cookie is stored
					v = value.getFirstChild( cookieConfig.strValue() );
					type = cookieConfig.hasChildren( "type" ) ? cookieConfig.getFirstChild( "type" ).strValue() : "string";
					recv_assignCookieValue( cookie.value(), v, type );
				}
			}
		}
	}
}

/**
 * Converts a cookie's string value to the configured native type and assigns
 * it to {@code value}. Unknown type keywords fall back to a string assignment.
 *
 * @param cookieValue the raw cookie value received on the wire
 * @param value the node of the message value to assign into
 * @param typeKeyword one of the NativeType keywords ("int", "long", "string", "double", "bool")
 * @throws IOException if the cookie value cannot be parsed as the requested numeric type
 */
private static void recv_assignCookieValue( String cookieValue, Value value, String typeKeyword )
	throws IOException
{
	NativeType type = NativeType.fromString( typeKeyword );
	if ( NativeType.INT == type ) {
		try {
			// valueOf instead of the deprecated Integer(String) constructor
			value.setValue( Integer.valueOf( cookieValue ) );
		} catch( NumberFormatException e ) {
			throw new IOException( e );
		}
	} else if ( NativeType.LONG == type ) {
		try {
			value.setValue( Long.valueOf( cookieValue ) );
		} catch( NumberFormatException e ) {
			throw new IOException( e );
		}
	} else if ( NativeType.STRING == type ) {
		value.setValue( cookieValue );
	} else if ( NativeType.DOUBLE == type ) {
		try {
			value.setValue( Double.valueOf( cookieValue ) );
		} catch( NumberFormatException e ) {
			throw new IOException( e );
		}
	} else if ( NativeType.BOOL == type ) {
		value.setValue( Boolean.valueOf( cookieValue ) );
	} else {
		value.setValue( cookieValue );
	}
}

// Copies received Cookie headers into the decoded message value, following
// the alias/type mapping configured in the "cookies" parameter (the
// operation-specific configuration takes precedence over the port-wide one).
private void recv_checkForCookies( HttpMessage message, DecodedMessage decodedMessage )
	throws IOException
{
	Value cookies = null;
	if ( hasOperationSpecificParameter( decodedMessage.operationName,
Parameters.COOKIES ) ) {
		cookies = getOperationSpecificParameterFirstValue( decodedMessage.operationName, Parameters.COOKIES );
	} else if ( hasParameter( Parameters.COOKIES ) ) {
		cookies = getParameterFirstValue( Parameters.COOKIES );
	}
	if ( cookies != null ) {
		Value v;
		String type;
		for( Entry< String, String > entry : message.cookies().entrySet() ) {
			if ( cookies.hasChildren( entry.getKey() ) ) {
				Value cookieConfig = cookies.getFirstChild( entry.getKey() );
				if ( cookieConfig.isString() ) {
					// the parameter value is the alias under which the cookie is stored
					v = decodedMessage.value.getFirstChild( cookieConfig.strValue() );
					if ( cookieConfig.hasChildren( "type" ) ) {
						type = cookieConfig.getFirstChild( "type" ).strValue();
					} else {
						type = "string";
					}
					recv_assignCookieValue( entry.getValue(), v, type );
				}
			}
		}
	}
}

// Copies configured HTTP headers of the received message into the decoded
// message value, under the aliases given by the "headers" protocol parameter.
// Underscores in parameter child names stand in for dashes in header names.
private void recv_checkForGenericHeader( HttpMessage message, DecodedMessage decodedMessage )
	throws IOException
{
	Value headers = null;
	if ( hasOperationSpecificParameter( decodedMessage.operationName, Parameters.HEADERS ) ) {
		headers = getOperationSpecificParameterFirstValue( decodedMessage.operationName, Parameters.HEADERS );
	} else if ( hasParameter( Parameters.HEADERS ) ) {
		headers = getParameterFirstValue( Parameters.HEADERS );
	}
	if ( headers != null ) {
		for( String headerName : headers.children().keySet() ) {
			String headerAlias = headers.getFirstChild( headerName ).strValue();
			headerName = headerName.replace( "_", "-" );
			decodedMessage.value.getFirstChild( headerAlias ).setValue( message.getPropertyOrEmptyString( headerName ) );
		}
	}
}

// Decodes the request query string into value. GET requests declared as JSON
// have their query string parsed as a JSON document; otherwise it is split
// into key=value pairs, with repeated keys becoming sibling elements.
private static void recv_parseQueryString( HttpMessage message, Value value, String contentType, boolean strictEncoding )
	throws IOException
{
	if ( message.isGet() && contentType.equals( ContentTypes.APPLICATION_JSON ) ) {
		recv_parseJsonQueryString( message, value, strictEncoding );
	} else {
		// per-key counter so repeated parameters land at successive indexes
		Map< String, Integer > indexes = new HashMap<>();
		String queryString = message.requestPath();
		String[] kv = queryString.split( "\\?", 2 );
		Integer index;
		if ( kv.length > 1 ) {
			queryString = kv[1];
String[] params = queryString.split( "&" );
			for( String param : params ) {
				String[] ikv = param.split( "=", 2 );
				if ( ikv.length > 1 ) { // entries without '=' are silently skipped
					index = indexes.get( ikv[0] );
					if ( index == null ) {
						index = 0;
						indexes.put( ikv[0], index );
					}
					// the query string was already URL decoded by the HttpParser
					value.getChildren( ikv[0] ).get( index ).setValue( ikv[1] );
					indexes.put( ikv[0], index + 1 );
				}
			}
		}
	}
}

// Parses the query string of a GET request as a JSON document into value.
private static void recv_parseJsonQueryString( HttpMessage message, Value value, boolean strictEncoding )
	throws IOException
{
	String queryString = message.requestPath();
	String[] kv = queryString.split( "\\?", 2 );
	if ( kv.length > 1 ) {
		// the query string was already URL decoded by the HttpParser
		JsUtils.parseJsonIntoValue( new StringReader( kv[1] ), value, strictEncoding );
	}
}

/*
 * Prints debug information about a received message
 */
private void recv_logDebugInfo( HttpMessage message, String charset )
	throws IOException
{
	StringBuilder debugSB = new StringBuilder();
	debugSB.append( "[HTTP debug] Receiving:\n" );
	debugSB.append( "HTTP Code: " + message.statusCode() + "\n" );
	debugSB.append( "Resource: " + message.requestPath() + "\n" );
	debugSB.append( "--> Header properties\n" );
	for( Entry< String, String > entry : message.properties() ) {
		debugSB.append( '\t' + entry.getKey() + ": " + entry.getValue() + '\n' );
	}
	for( HttpMessage.Cookie cookie : message.setCookies() ) {
		debugSB.append( "\tset-cookie: " + cookie.toString() + '\n' );
	}
	for( Entry< String, String > entry : message.cookies().entrySet() ) {
		debugSB.append( "\tcookie: " + entry.getKey() + '=' + entry.getValue() + '\n' );
	}
	// the body is dumped only when debug.showContent > 0
	if ( getParameterFirstValue( Parameters.DEBUG ).getFirstChild( "showContent" ).intValue() > 0 && message.size() > 0 ) {
		debugSB.append( "--> Message content\n" );
		debugSB.append( new String( message.content(), charset ) );
	}
	Interpreter.getInstance().logInfo( debugSB.toString() );
}

// Records (in responseFormat) the format the response should be encoded
// with, mirroring the content type of the received request.
private void recv_parseRequestFormat( String type )
	throws IOException
{
	responseFormat = null;
	if (
"text/xml".equals( type ) ) {
		responseFormat = "xml";
	} else if ( "text/x-gwt-rpc".equals( type ) ) {
		responseFormat = "text/x-gwt-rpc";
	} else if ( ContentTypes.APPLICATION_JSON.equals( type ) ) {
		responseFormat = "json";
	}
}

// Decodes the message body into decodedMessage.value, dispatching on the
// content type (or on the operation's forceContentDecoding parameter).
private void recv_parseMessage( HttpMessage message, DecodedMessage decodedMessage, String type, String charset )
	throws IOException
{
	// for responses the operation is the one we originally invoked (inputId)
	final String operationName = message.isResponse() ? inputId : decodedMessage.operationName;
	if ( getOperationSpecificStringParameter( operationName, Parameters.FORCE_CONTENT_DECODING ).equals( "string" ) ) {
		decodedMessage.value.setValue( new String( message.content(), charset ) );
	} else if ( "text/html".equals( type ) ) {
		decodedMessage.value.setValue( new String( message.content(), charset ) );
	} else if ( "application/x-www-form-urlencoded".equals( type ) ) {
		parseForm( message, decodedMessage.value, charset );
	} else if ( "text/xml".equals( type ) || type.contains( "xml" ) ) {
		parseXML( message, decodedMessage.value, charset );
	} else if ( "text/x-gwt-rpc".equals( type ) ) {
		// GWT-RPC carries the operation name inside the payload itself
		decodedMessage.operationName = parseGWTRPC( message, decodedMessage.value, charset );
	} else if ( "multipart/form-data".equals( type ) ) {
		parseMultiPartFormData( message, decodedMessage.value, charset );
	} else if ( "application/octet-stream".equals( type ) || type.startsWith( "image/" ) || "application/zip".equals( type ) ) {
		decodedMessage.value.setValue( new ByteArray( message.content() ) );
	} else if ( ContentTypes.APPLICATION_JSON.equals( type ) || type.contains( "json" ) ) {
		boolean strictEncoding = checkStringParameter( Parameters.JSON_ENCODING, "strict" );
		parseJson( message, decodedMessage.value, strictEncoding, charset );
	} else {
		// unknown content types fall back to a plain string decode
		decodedMessage.value.setValue( new String( message.content(), charset ) );
	}
}

// Resolves the operation to use when the request path does not name one,
// from the defaultOperation parameter (optionally specialised per HTTP method).
private String getDefaultOperation( HttpMessage.Type t )
{
	if ( hasParameter( Parameters.DEFAULT_OPERATION ) ) {
		Value dParam = getParameterFirstValue( Parameters.DEFAULT_OPERATION );
		String method =
HttpUtils.httpMessageTypeToString( t ); if ( method == null || dParam.hasChildren( method ) == false ) { return dParam.strValue(); } else { return dParam.getFirstChild( method ).strValue(); } } return null; } private void recv_checkReceivingOperation( HttpMessage message, DecodedMessage decodedMessage ) { if ( decodedMessage.operationName == null ) { String requestPath = message.requestPath().split( "\\?", 2 )[0]; decodedMessage.operationName = requestPath.substring( 1 ); Matcher m = LocationParser.RESOURCE_SEPARATOR_PATTERN.matcher( decodedMessage.operationName ); if ( m.find() ) { int resourceStart = m.end(); if ( m.find() ) { decodedMessage.resourcePath = requestPath.substring( resourceStart, m.start() + 1 ); decodedMessage.operationName = requestPath.substring( m.end() + 1, requestPath.length() ); } } } if ( decodedMessage.resourcePath.equals( "/" ) && !channel().parentInputPort().canHandleInputOperation( decodedMessage.operationName ) ) { String defaultOpId = getDefaultOperation( message.type() ); if ( defaultOpId != null ) { Value body = decodedMessage.value; decodedMessage.value = Value.create(); decodedMessage.value.getChildren( "data" ).add( body ); decodedMessage.value.getFirstChild( "operation" ).setValue( decodedMessage.operationName ); decodedMessage.value.setFirstChild( "requestUri", message.requestPath() ); if ( message.userAgent() != null ) { decodedMessage.value.getFirstChild( Parameters.USER_AGENT ).setValue( message.userAgent() ); } Value cookies = decodedMessage.value.getFirstChild( "cookies" ); for( Entry< String, String > cookie : message.cookies().entrySet() ) { cookies.getFirstChild( cookie.getKey() ).setValue( cookie.getValue() ); } decodedMessage.operationName = defaultOpId; } } } private void recv_checkForMultiPartHeaders( DecodedMessage decodedMessage ) { if ( multiPartFormDataParser != null ) { String target; for( Entry< String, MultiPartFormDataParser.PartProperties > entry : multiPartFormDataParser.getPartPropertiesSet() ) { if ( 
entry.getValue().filename() != null ) { target = getMultipartHeaderForPart( decodedMessage.operationName, entry.getKey() ); if ( target != null ) { decodedMessage.value.getFirstChild( target ).setValue( entry.getValue().filename() ); } } } multiPartFormDataParser = null; } } private void recv_checkForMessageProperties( HttpMessage message, DecodedMessage decodedMessage ) throws IOException { recv_checkForCookies( message, decodedMessage ); recv_checkForGenericHeader( message, decodedMessage ); recv_checkForMultiPartHeaders( decodedMessage ); if ( message.userAgent() != null && hasParameter( Parameters.USER_AGENT ) ) { getParameterFirstValue( Parameters.USER_AGENT ).setValue( message.userAgent() ); } if ( getParameterVector( Parameters.HOST ) != null ) { getParameterFirstValue( Parameters.HOST ).setValue( message.getPropertyOrEmptyString( Parameters.HOST ) ); } } private static class DecodedMessage { private String operationName = null; private Value value = Value.create(); private String resourcePath = "/"; private long id = CommMessage.GENERIC_ID; } private void recv_checkForStatusCode( HttpMessage message ) { if ( hasParameter( Parameters.STATUS_CODE ) ) { getParameterFirstValue( Parameters.STATUS_CODE ).setValue( message.statusCode() ); } } @Override public CommMessage recv_internal( InputStream istream, OutputStream ostream ) throws IOException { HttpMessage message = new HttpParser( istream ).parse(); String charset = HttpUtils.getCharset( null, message ); CommMessage retVal = null; DecodedMessage decodedMessage = new DecodedMessage(); HttpUtils.recv_checkForChannelClosing( message, channel() ); if ( checkBooleanParameter( Parameters.DEBUG ) ) { recv_logDebugInfo( message, charset ); } recv_checkForStatusCode( message ); encoding = message.getProperty( "accept-encoding" ); headRequest = inInputPort && message.isHead(); String contentType = DEFAULT_CONTENT_TYPE; if ( message.getProperty( "content-type" ) != null ) { contentType = message.getProperty( 
"content-type" ).split( ";", 2 )[0].toLowerCase(); } // URI parameter parsing if ( message.requestPath() != null ) { boolean strictEncoding = checkStringParameter( Parameters.JSON_ENCODING, "strict" ); recv_parseQueryString( message, decodedMessage.value, contentType, strictEncoding ); } recv_parseRequestFormat( contentType ); if ( !message.isResponse() ) { recv_checkReceivingOperation( message, decodedMessage ); } if ( !message.isGet() && !message.isHead() && !message.isDelete() ) { // body parsing if ( message.size() > 0 ) { recv_parseMessage( message, decodedMessage, contentType, charset ); } } if ( checkBooleanParameter( Parameters.CONCURRENT ) ) { String messageId = message.getProperty( Headers.JOLIE_MESSAGE_ID ); if ( messageId != null ) { try { decodedMessage.id = Long.parseLong( messageId ); } catch( NumberFormatException e ) {} } } if ( message.isResponse() ) { String responseHeader = ""; if ( hasParameter( Parameters.RESPONSE_HEADER ) || hasOperationSpecificParameter( inputId, Parameters.RESPONSE_HEADER ) ) { if ( hasOperationSpecificParameter( inputId, Parameters.RESPONSE_HEADER ) ) { responseHeader = getOperationSpecificStringParameter( inputId, Parameters.RESPONSE_HEADER ); } else { responseHeader = getStringParameter( Parameters.RESPONSE_HEADER ); } for( Entry<String, String> param : message.properties() ) { decodedMessage.value.getFirstChild( responseHeader ).getFirstChild( param.getKey() ).setValue( param.getValue() ); } decodedMessage.value.getFirstChild( responseHeader ).getFirstChild( Parameters.STATUS_CODE ).setValue( message.statusCode() ); } recv_checkForSetCookie( message, decodedMessage.value ); retVal = new CommMessage( decodedMessage.id, inputId, decodedMessage.resourcePath, decodedMessage.value, null ); } else if ( message.isError() == false ) { recv_checkForMessageProperties( message, decodedMessage ); retVal = new CommMessage( decodedMessage.id, decodedMessage.operationName, decodedMessage.resourcePath, decodedMessage.value, null ); } 
if ( retVal != null && "/".equals( retVal.resourcePath() ) && channel().parentPort() != null && (channel().parentPort().getInterface().containsOperation( retVal.operationName() ) || channel().parentInputPort().getAggregatedOperation( retVal.operationName() ) != null) ) { try { // The message is for this service boolean hasInput = false; OneWayTypeDescription oneWayTypeDescription = null; if ( channel().parentInputPort() != null ) { if ( channel().parentInputPort().getAggregatedOperation( retVal.operationName() ) != null ) { oneWayTypeDescription = channel().parentInputPort().getAggregatedOperation( retVal.operationName() ).getOperationTypeDescription().asOneWayTypeDescription(); hasInput = true; } } if ( !hasInput ) { Interface iface = channel().parentPort().getInterface(); oneWayTypeDescription = iface.oneWayOperations().get( retVal.operationName() ); } if ( oneWayTypeDescription != null ) { // We are receiving a One-Way message oneWayTypeDescription.requestType().cast( retVal.value() ); } else { hasInput = false; RequestResponseTypeDescription rrTypeDescription = null; if ( channel().parentInputPort() != null ) { if ( channel().parentInputPort().getAggregatedOperation( retVal.operationName() ) != null ) { rrTypeDescription = channel().parentInputPort().getAggregatedOperation( retVal.operationName() ).getOperationTypeDescription().asRequestResponseTypeDescription(); hasInput = true; } } if ( !hasInput ) { Interface iface = channel().parentPort().getInterface(); rrTypeDescription = iface.requestResponseOperations().get( retVal.operationName() ); } if ( retVal.isFault() ) { Type faultType = rrTypeDescription.faults().get( retVal.fault().faultName() ); if ( faultType != null ) { faultType.cast( retVal.value() ); } } else { if ( message.isResponse() ) { rrTypeDescription.responseType().cast( retVal.value() ); } else { rrTypeDescription.requestType().cast( retVal.value() ); } } } } catch( TypeCastingException e ) { // TODO: do something here? 
} } return retVal; } @Override public CommMessage recv( InputStream istream, OutputStream ostream ) throws IOException { return HttpUtils.recv( istream, ostream, inInputPort, channel(), this ); } private Type getSendType( CommMessage message ) throws IOException { Type ret = null; if ( channel().parentPort() == null ) { throw new IOException( "Could not retrieve communication port for HTTP protocol" ); } OperationTypeDescription opDesc = channel().parentPort().getOperationTypeDescription( message.operationName(), Constants.ROOT_RESOURCE_PATH ); if ( opDesc == null ) { return null; } if ( opDesc.asOneWayTypeDescription() != null ) { if ( message.isFault() ) { ret = Type.UNDEFINED; } else { OneWayTypeDescription ow = opDesc.asOneWayTypeDescription(); ret = ow.requestType(); } } else if ( opDesc.asRequestResponseTypeDescription() != null ) { RequestResponseTypeDescription rr = opDesc.asRequestResponseTypeDescription(); if ( message.isFault() ) { ret = rr.getFaultType( message.fault().faultName() ); if ( ret == null ) { ret = Type.UNDEFINED; } } else { ret = ( inInputPort ) ? rr.responseType() : rr.requestType(); } } return ret; } }
package jolie.net; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.net.URI; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import javax.xml.XMLConstants; import javax.xml.namespace.QName; import javax.xml.soap.Detail; import javax.xml.soap.DetailEntry; import javax.xml.soap.MessageFactory; import javax.xml.soap.Name; import javax.xml.soap.SOAPBody; import javax.xml.soap.SOAPBodyElement; import javax.xml.soap.SOAPConstants; import javax.xml.soap.SOAPElement; import javax.xml.soap.SOAPEnvelope; import javax.xml.soap.SOAPException; import javax.xml.soap.SOAPFault; import javax.xml.soap.SOAPHeader; import javax.xml.soap.SOAPHeaderElement; import javax.xml.soap.SOAPMessage; import jolie.Constants; import jolie.Interpreter; import jolie.runtime.FaultException; import jolie.runtime.InputOperation; import jolie.runtime.InvalidIdException; import jolie.runtime.Value; import jolie.runtime.ValueVector; import jolie.runtime.VariablePath; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import com.sun.xml.xsom.XSAttributeDecl; import com.sun.xml.xsom.XSAttributeUse; import com.sun.xml.xsom.XSComplexType; import com.sun.xml.xsom.XSContentType; import com.sun.xml.xsom.XSElementDecl; import com.sun.xml.xsom.XSModelGroup; import com.sun.xml.xsom.XSModelGroupDecl; import com.sun.xml.xsom.XSParticle; import com.sun.xml.xsom.XSSchema; import com.sun.xml.xsom.XSSchemaSet; import com.sun.xml.xsom.XSTerm; import com.sun.xml.xsom.XSType; import com.sun.xml.xsom.parser.XSOMParser; import java.io.ByteArrayInputStream; import java.util.Vector; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import 
javax.xml.transform.Source; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import jolie.net.http.HttpMessage; import jolie.net.http.HttpParser; import org.w3c.dom.Document; import org.xml.sax.InputSource; /** Implements the SOAP over HTTP protocol. * * @author Fabrizio Montesi * * 2006 - Fabrizio Montesi, Mauro Silvagni: first write. * 2007 - Fabrizio Montesi: rewritten from scratch, exploiting new JOLIE capabilities. * 2008 - Fabrizio Montesi: initial support for schemas. * 2008 - Claudio Guidi: initial support for WS-Addressing. * */ public class SoapProtocol extends CommProtocol { private String inputId = null; final private Interpreter interpreter; final private MessageFactory messageFactory; private XSSchemaSet schemaSet = null; private URI uri = null; private boolean received = false; final private static String CRLF = new String( new char[] { 13, 10 } ); public SoapProtocol clone() { SoapProtocol ret = new SoapProtocol( configurationPath, uri, interpreter, messageFactory ); ret.inputId = inputId; ret.schemaSet = schemaSet; return ret; } private SoapProtocol( VariablePath configurationPath, URI uri, Interpreter interpreter, MessageFactory messageFactory ) { super( configurationPath ); this.uri = uri; this.interpreter = interpreter; this.messageFactory = messageFactory; } public SoapProtocol( VariablePath configurationPath, URI uri, Interpreter interpreter ) throws SOAPException { super( configurationPath ); this.uri = uri; this.interpreter = interpreter; this.messageFactory = MessageFactory.newInstance( SOAPConstants.SOAP_1_1_PROTOCOL ); } private Map< String, String > namespacePrefixMap = new HashMap< String, String > (); private XSSchemaSet getSchemaSet() throws IOException, SAXException { if ( schemaSet == null ) { ValueVector vec = getParameterVector( "schema" ); if ( vec.size() > 0 ) { XSOMParser schemaParser = new XSOMParser(); for( 
Value v : vec ) schemaParser.parse( new File( v.strValue() ) ); schemaSet = schemaParser.getResult(); String nsPrefix = "jolie"; int i = 1; for( XSSchema schema : schemaSet.getSchemas() ) { if ( !schema.getTargetNamespace().equals( XMLConstants.W3C_XML_SCHEMA_NS_URI ) ) namespacePrefixMap.put( schema.getTargetNamespace(), nsPrefix + i++ ); } } } return schemaSet; } private void initNamespacePrefixes( SOAPElement element ) throws SOAPException { for( Entry< String, String > entry : namespacePrefixMap.entrySet() ) element.addNamespaceDeclaration( entry.getValue(), entry.getKey() ); } private static void valueToSOAPElement( Value value, SOAPElement element, SOAPEnvelope soapEnvelope ) throws SOAPException { //String type = null; if ( value.isDefined() ) { /*if ( value.isInt() ) type = "int"; else type = "string"; element.addAttribute( soapEnvelope.createName( "type" ), "xsd:" + type );*/ element.addTextNode( value.strValue() ); } Map< String, ValueVector > attrs = getAttributesOrNull( value ); if ( attrs != null ) { for( Entry< String, ValueVector > attrEntry : attrs.entrySet() ) { element.addAttribute( soapEnvelope.createName( attrEntry.getKey() ), attrEntry.getValue().first().strValue() ); } } for( Entry< String, ValueVector > entry : value.children().entrySet() ) { if ( !entry.getKey().startsWith( "@" ) ) { for( Value val : entry.getValue() ) { valueToSOAPElement( val, element.addChildElement( entry.getKey() ), soapEnvelope ); } } } } private static Map< String, ValueVector > getAttributesOrNull( Value value ) { Map< String, ValueVector > ret = null; ValueVector vec = value.children().get( Constants.Predefined.ATTRIBUTES.token().content() ); if ( vec != null && vec.size() > 0 ) ret = vec.first().children(); if ( ret == null ) ret = new HashMap< String, ValueVector >(); return ret; } private static Value getAttributeOrNull( Value value, String attrName ) { Value ret = null; Map< String, ValueVector > attrs = getAttributesOrNull( value ); if ( attrs != null ) { 
ValueVector vec = attrs.get( attrName ); if ( vec != null && vec.size() > 0 ) ret = vec.first(); } return ret; } private static Value getAttribute( Value value, String attrName ) { return value.getChildren( Constants.Predefined.ATTRIBUTES.token().content() ).first() .getChildren( attrName ).first(); } private String getPrefixOrNull( XSAttributeDecl decl ) { if ( decl.getOwnerSchema().attributeFormDefault() ) return namespacePrefixMap.get( decl.getOwnerSchema().getTargetNamespace() ); return null; } private String getPrefixOrNull( XSElementDecl decl ) { if ( decl.getOwnerSchema().elementFormDefault() ) return namespacePrefixMap.get( decl.getOwnerSchema().getTargetNamespace() ); return null; } private String getPrefix( XSElementDecl decl ) { return namespacePrefixMap.get( decl.getOwnerSchema().getTargetNamespace() ); } private void valueToTypedSOAP( Value value, XSElementDecl xsDecl, SOAPElement element, SOAPEnvelope envelope ) throws SOAPException { valueToTypedSOAP( value, xsDecl, element, envelope, true ); } private void valueToTypedSOAP( Value value, XSElementDecl xsDecl, SOAPElement element, SOAPEnvelope envelope, boolean first // Ugly fix! This should be removed as soon as another option arises. ) throws SOAPException { XSType type = xsDecl.getType(); if ( type.isSimpleType() ) { element.addTextNode( value.strValue() ); } else if ( type.isComplexType() ) { String name; Value currValue; XSComplexType complexT = type.asComplexType(); // Iterate over attributes Collection< ? 
extends XSAttributeUse > attributeUses = complexT.getAttributeUses(); for( XSAttributeUse attrUse : attributeUses ) { name = attrUse.getDecl().getName(); if ( (currValue=getAttributeOrNull( value, name )) != null ) { QName attrName = envelope.createQName( name, getPrefixOrNull( attrUse.getDecl() ) ); element.addAttribute( attrName, currValue.strValue() ); } } XSParticle particle; XSContentType contentT; contentT = complexT.getContentType(); if ( contentT.asSimpleType() != null ) { element.addTextNode( value.strValue() ); } else if ( (particle=contentT.asParticle()) != null ) { XSTerm term = particle.getTerm(); // XSElementDecl elementDecl; XSModelGroupDecl modelGroupDecl; XSModelGroup modelGroup = null; //int size = value.children().size(); //if ( particle.getMinOccurs() // It's a simple element, repeated some times /*if ( (elementDecl=term.asElementDecl()) != null ) { } else */if ( (modelGroupDecl=term.asModelGroupDecl()) != null ) { modelGroup = modelGroupDecl.getModelGroup(); } else if ( term.isModelGroup() ) modelGroup = term.asModelGroup(); if ( modelGroup != null ) { XSModelGroup.Compositor compositor = modelGroup.getCompositor(); if ( compositor.equals( XSModelGroup.SEQUENCE ) ) { XSParticle[] children = modelGroup.getChildren(); XSTerm currTerm; XSElementDecl currElementDecl; Value v; ValueVector vec; String prefix; for( int i = 0; i < children.length; i++ ) { currTerm = children[i].getTerm(); if ( currTerm.isElementDecl() ) { currElementDecl = currTerm.asElementDecl(); name = currElementDecl.getName(); prefix = ( first ) ? 
getPrefix( currElementDecl ) : getPrefixOrNull( currElementDecl ); SOAPElement childElement = null; if ( prefix == null ) childElement = element.addChildElement( name ); else childElement = element.addChildElement( name, prefix ); if ( (vec=value.children().get( name )) != null ) { v = vec.remove( 0 ); valueToTypedSOAP( v, currElementDecl, childElement, envelope, false ); } else if ( children[i].getMinOccurs() > 0 ) { // TODO improve this error message. throw new SOAPException( "Invalid variable structure: expected " + name ); } } } } } } } } public void send( OutputStream ostream, CommMessage message ) throws IOException { try { inputId = message.operationName(); if ( received ) { // We're responding to a request inputId += "Response"; } String messageNamespace = getParameterVector( "namespace" ).first().strValue(); SOAPMessage soapMessage = messageFactory.createMessage(); SOAPEnvelope soapEnvelope = soapMessage.getSOAPPart().getEnvelope(); SOAPBody soapBody = soapEnvelope.getBody(); if ( getParameterVector( "wsAddressing" ).first().intValue() == 1 ) { SOAPHeader soapHeader = soapEnvelope.getHeader(); // WS-Addressing namespace soapHeader.addNamespaceDeclaration( "wsa", "http://schemas.xmlsoap.org/ws/2004/03/addressing" ); // Message ID Name messageIdName = soapEnvelope.createName( "MessageID", "wsa", "http://schemas.xmlsoap.org/ws/2004/03/addressing" ); SOAPHeaderElement messageIdElement = soapHeader.addHeaderElement(messageIdName); // TODO: message ID generation messageIdElement.setValue( "uuid:1" ); // Action element Name actionName = soapEnvelope.createName( "Action", "wsa", "http://schemas.xmlsoap.org/ws/2004/03/addressing" ); SOAPHeaderElement actionElement = soapHeader.addHeaderElement( actionName ); /* TODO: the action element could be specified within the parameter. * Perhaps wsAddressing.action ? 
* We could also allow for giving a prefix or a suffix to the operation name, * like wsAddressing.action.prefix, wsAddressing.action.suffix */ actionElement.setValue( message.operationName() ); // From element Name fromName = soapEnvelope.createName( "From", "wsa", "http://schemas.xmlsoap.org/ws/2004/03/addressing" ); SOAPHeaderElement fromElement = soapHeader.addHeaderElement( fromName ); Name addressName = soapEnvelope.createName( "Address", "wsa", "http://schemas.xmlsoap.org/ws/2004/03/addressing" ); SOAPElement addressElement = fromElement.addChildElement( addressName ); addressElement.setValue( "http://schemas.xmlsoap.org/ws/2004/03/addressing/role/anonymous" ); // To element } if ( message.isFault() ) { FaultException f = message.fault(); SOAPFault soapFault = soapBody.addFault(); soapFault.setFaultCode( soapEnvelope.createQName( "Server", soapEnvelope.getPrefix() ) ); soapFault.setFaultString( f.getMessage() ); Detail detail = soapFault.addDetail(); DetailEntry de = detail.addDetailEntry( soapEnvelope.createName( f.faultName(), null, messageNamespace ) ); valueToSOAPElement( f.value(), de, soapEnvelope ); } else { XSSchemaSet sSet = getSchemaSet(); XSElementDecl elementDecl; if ( sSet == null || (elementDecl=sSet.getElementDecl( messageNamespace, inputId )) == null ) { Name operationName = soapEnvelope.createName( inputId ); SOAPBodyElement opBody = soapBody.addBodyElement( operationName ); valueToSOAPElement( message.value(), opBody, soapEnvelope ); } else { initNamespacePrefixes( soapEnvelope ); boolean wrapped = true; Value vStyle = getParameterVector( "style" ).first(); if ( "document".equals( vStyle.strValue() ) ) { wrapped = ( vStyle.getChildren( "wrapped" ).first().intValue() > 0 ); } SOAPElement opBody = soapBody; if ( wrapped ) { opBody = soapBody.addBodyElement( soapEnvelope.createName( inputId, namespacePrefixMap.get( elementDecl.getOwnerSchema().getTargetNamespace() ), null ) ); } valueToTypedSOAP( message.value(), elementDecl, opBody, 
soapEnvelope, !wrapped );
			}
		}

		ByteArrayOutputStream tmpStream = new ByteArrayOutputStream();
		soapMessage.writeTo( tmpStream );
		// Decode the serialised envelope explicitly as UTF-8: relying on the
		// platform default charset corrupts non-ASCII payloads on some JVMs.
		String soapString = CRLF + "<?xml version=\"1.0\" encoding=\"utf-8\"?>" + new String( tmpStream.toByteArray(), "UTF-8" );
		StringBuilder messageString = new StringBuilder();
		String soapAction = null;
		if ( received ) {
			// We're responding to a request
			messageString.append( "HTTP/1.1 200 OK" + CRLF );
			received = false;
		} else {
			// We're sending a notification or a solicit
			String path = uri.getPath();
			if ( path == null || path.length() == 0 ) {
				path = "*";
			}
			messageString.append( "POST " + path + " HTTP/1.1" + CRLF );
			messageString.append( "Host: " + uri.getHost() + CRLF );
			soapAction = "SOAPAction: \"" + messageNamespace + "/" + message.operationName() + '\"' + CRLF;
		}
		if ( getParameterVector( "keepAlive" ).first().intValue() != 1 ) {
			channel.setToBeClosed( true );
			messageString.append( "Connection: close" + CRLF );
		}
		messageString.append( "Content-Type: text/xml; charset=\"utf-8\"" + CRLF );
		// Content-Length counts octets, not chars: use the UTF-8 encoded size.
		// (soapString.length() undercounts whenever the payload has non-ASCII text.)
		messageString.append( "Content-Length: " + soapString.getBytes( "UTF-8" ).length + CRLF );
		if ( soapAction != null ) {
			messageString.append( soapAction );
		}
		messageString.append( soapString + CRLF );
		if ( getParameterVector( "debug" ).first().intValue() > 0 ) {
			interpreter.logger().info( "[SOAP debug] Sending:\n" + tmpStream.toString( "UTF-8" ) );
		}
		inputId = message.operationName();
		// Write with an explicit UTF-8 encoder so the bytes on the wire match
		// both the declared charset and the Content-Length computed above.
		Writer writer = new OutputStreamWriter( ostream, "UTF-8" );
		writer.write( messageString.toString() );
		writer.flush();
	} catch( SOAPException se ) {
		throw new IOException( se );
	} catch( SAXException saxe ) {
		throw new IOException( saxe );
	}
}

// Recursively maps a DOM node (its attributes and children) onto a Jolie value.
private static void xmlNodeToValue( Value value, Node node )
{
	Node currNode;
	// Set attributes
	NamedNodeMap attributes = node.getAttributes();
	if ( attributes != null ) {
		for( int i = 0; i < attributes.getLength(); i++ ) {
			currNode = attributes.item( i );
			getAttribute( value, currNode.getNodeName() ).setValue( currNode.getNodeValue() );
		}
	}
	// Set children
// --- tail of xmlNodeToValue( Value, Node ): copy element/text children, then re-cast by type hint ---
NodeList list = node.getChildNodes();
Value childValue;
for( int i = 0; i < list.getLength(); i++ ) {
	currNode = list.item( i );
	switch( currNode.getNodeType() ) {
	case Node.ELEMENT_NODE:
		childValue = value.getNewChild( currNode.getLocalName() );
		xmlNodeToValue( childValue, currNode );
		break;
	case Node.TEXT_NODE:
		value.setValue( currNode.getNodeValue() );
		break;
	}
}
Value attr;
if ( (attr=getAttributeOrNull( value, "type" )) != null ) {
	// SOAP-encoded messages may carry an xsi "type" attribute; re-cast the
	// string value to the declared native type so Jolie sees int/double/string.
	String type = attr.strValue();
	if ( "xsd:int".equals( type ) )
		value.setValue( value.intValue() );
	else if ( "xsd:double".equals( type ) )
		value.setValue( value.doubleValue() );
	else if ( "xsd:string".equals( type ) )
		value.setValue( value.strValue() );
}
}

/**
 * Receives a SOAP-over-HTTP message from the given stream and converts it
 * into a {@link CommMessage}.
 *
 * Connection lifetime follows the HTTP version: HTTP/1.1 keeps the channel
 * open unless "Connection: close" is present; HTTP/1.0 closes it unless
 * "Connection: keep-alive" is present. The operation name is taken from the
 * SOAPAction header and, for non-fault bodies, overridden by the local name
 * of the first body element. SOAP faults are mapped to FaultException;
 * a SAX parse failure yields an "InvalidType" fault rather than an error.
 *
 * @param istream the stream to read the HTTP request/response from
 * @return the decoded message (never null for well-formed input)
 * @throws IOException on transport, SOAP or parser-configuration errors,
 *         or when a request carries no SOAPAction operation
 */
public CommMessage recv( InputStream istream )
	throws IOException
{
	HttpParser parser = new HttpParser( istream );
	HttpMessage message = parser.parse();
	HttpMessage.Version version = message.version();
	if ( version == null || version.equals( HttpMessage.Version.HTTP_1_1 ) ) {
		// The default is to keep the connection open, unless Connection: close is specified
		if ( message.getPropertyOrEmptyString( "connection" ).equalsIgnoreCase( "close" ) )
			channel.setToBeClosed( true );
		else
			channel.setToBeClosed( false );
	} else if ( version.equals( HttpMessage.Version.HTTP_1_0 ) ) {
		// The default is to close the connection, unless Connection: Keep-Alive is specified
		if ( message.getPropertyOrEmptyString( "connection" ).equalsIgnoreCase( "keep-alive" ) )
			channel.setToBeClosed( false );
		else
			channel.setToBeClosed( true );
	}
	CommMessage retVal = null;
	String messageId = message.getPropertyOrEmptyString( "soapaction" );
	FaultException fault = null;
	Value value = Value.create();
	try {
		if ( message.content() != null ) {
			SOAPMessage soapMessage = messageFactory.createMessage();
			DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
			factory.setNamespaceAware( true );
			// NOTE(review): DTDs / external entities are not disabled on this
			// parser; SOAP bodies from untrusted peers may be exposed to XXE.
			// Confirm and harden (e.g. disallow-doctype-decl) if applicable.
			DocumentBuilder builder = factory.newDocumentBuilder();
			InputSource src = new InputSource( new ByteArrayInputStream( message.content() ) );
			Document doc = builder.parse( src );
			DOMSource dom = new DOMSource( doc );
			soapMessage.getSOAPPart().setContent( dom );
			if ( getParameterVector( "debug" ).first().intValue() > 0 ) {
				ByteArrayOutputStream tmpStream = new ByteArrayOutputStream();
				soapMessage.writeTo( tmpStream );
				interpreter.logger().info( "[SOAP debug] Receiving:\n" + tmpStream.toString() );
			}
			SOAPFault soapFault = soapMessage.getSOAPBody().getFault();
			if ( soapFault == null ) {
				// Operation name comes from the first body element, overriding SOAPAction
				messageId = soapMessage.getSOAPBody().getFirstChild().getLocalName();
				xmlNodeToValue( value, soapMessage.getSOAPBody().getFirstChild() );
				// Optionally validate the payload against any schemas marked with validate > 0
				ValueVector schemaPaths = getParameterVector( "schema" );
				if ( schemaPaths.size() > 0 ) {
					Vector< Source > sources = new Vector< Source >();
					Value schemaPath;
					for( int i = 0; i < schemaPaths.size(); i++ ) {
						schemaPath = schemaPaths.get( i );
						if ( schemaPath.getChildren( "validate" ).first().intValue() > 0 )
							sources.add( new StreamSource( new File( schemaPaths.get( i ).strValue() ) ) );
					}
					if ( !sources.isEmpty() ) {
						Schema schema =
							SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI )
							.newSchema( (Source[])sources.toArray() );
						schema.newValidator().validate( new DOMSource( soapMessage.getSOAPBody().getFirstChild() ) );
					}
				}
			} else {
				// Map the SOAP fault detail (if any) onto a Jolie fault
				String faultName = "UnknownFault";
				Value faultValue = Value.create();
				Detail d = soapFault.getDetail();
				if ( d != null ) {
					Node n = d.getFirstChild();
					if ( n != null ) {
						faultName = n.getLocalName();
						xmlNodeToValue( faultValue, n );
					} else {
						faultValue.setValue( soapFault.getFaultString() );
					}
				}
				fault = new FaultException( faultName, faultValue );
			}
		}
		if ( message.type() == HttpMessage.Type.RESPONSE ) {
			if ( fault != null && message.httpCode() == 500 )
				fault = new FaultException( "InternalServerError", "" );
			//TODO support resourcePath
			retVal = new CommMessage( inputId, "/", value, fault );
		} else if (
			message.type() == HttpMessage.Type.POST ||
			message.type() == HttpMessage.Type.GET
		) {
			if ( messageId.isEmpty() )
				throw new IOException( "Received SOAP Message without a specified operation" );
			//TODO support resourcePath
			retVal = new CommMessage( messageId, "/", value, fault );
		}
	} catch( SOAPException se ) {
		throw new IOException( se );
	} catch( ParserConfigurationException pce ) {
		throw new IOException( pce );
	} catch( SAXException saxe ) {
		// Malformed XML is reported to the caller as an InvalidType fault
		//TODO support resourcePath
		retVal = new CommMessage( messageId, "/", value, new FaultException( "InvalidType" ) );
	}
	received = true;
	return retVal;
}
}
package groovy.util;

import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.Script;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.codehaus.groovy.control.CompilationFailedException;
import org.codehaus.groovy.runtime.InvokerHelper;

/**
 * Specific script engine able to reload modified scripts as well as dealing
 * properly with dependent scripts.
 *
 * @author sam
 * @author Marc Palmer
 * @author Guillaume Laforge
 */
public class GroovyScriptEngine implements ResourceConnector {

    /**
     * Simple testing harness for the GSE. Enter script roots as arguments and
     * then input script names to run them.
     *
     * @param urls script root directories, resolved as file URLs
     * @throws Exception on any setup failure
     */
    public static void main(String[] urls) throws Exception {
        URL[] roots = new URL[urls.length];
        for (int i = 0; i < roots.length; i++) {
            roots[i] = new File(urls[i]).toURL();
        }
        GroovyScriptEngine gse = new GroovyScriptEngine(roots);
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        String line;
        while (true) {
            System.out.print("groovy> ");
            if ((line = br.readLine()) == null || line.equals("quit"))
                break;
            try {
                System.out.println(gse.run(line, new Binding()));
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /** Base URLs against which relative script names are resolved, in priority order. */
    private URL[] roots;
    /** scriptName -> ScriptCacheEntry; synchronized because engines may be shared across threads. */
    private Map scriptCache = Collections.synchronizedMap(new HashMap());
    /** Source of script bytes; by default this engine itself (see getResourceConnection). */
    private ResourceConnector rc;
    private ClassLoader parentClassLoader = getClass().getClassLoader();

    /** One compiled script plus the bookkeeping needed to decide when it is stale. */
    private static class ScriptCacheEntry {
        private Class scriptClass;
        private long lastModified;
        /** URL -> Long(lastModified) for every script this one depends on. */
        private Map dependencies = new HashMap();
    }

    /**
     * Get a resource connection as a <code>URLConnection</code> to retrieve a script
     * from the <code>ResourceConnector</code>.
     *
     * @param resourceName name of the resource to be retrieved
     * @return a URLConnection to the resource
     * @throws ResourceException if the resource cannot be opened under any root
     */
    public URLConnection getResourceConnection(String resourceName) throws ResourceException {
        // Try each root in order; the first root under which the resource can
        // actually be opened wins.
        URLConnection groovyScriptConn = null;
        ResourceException se = null;
        for (int i = 0; i < roots.length; i++) {
            URL scriptURL = null;
            try {
                scriptURL = new URL(roots[i], resourceName);
                groovyScriptConn = scriptURL.openConnection();

                // Make sure we can open it, if we can't it doesn't exist.
                // Could be very slow if there are any non-file:// URLs in there.
                // NOTE(review): the stream opened by this probe is intentionally
                // not closed here — callers re-read it from the returned connection.
                groovyScriptConn.getInputStream();

                break; // Now this is a bit unusual
            } catch (MalformedURLException e) {
                String message = "Malformed URL: " + roots[i] + ", " + resourceName;
                if (se == null) {
                    se = new ResourceException(message);
                } else {
                    se = new ResourceException(message, se);
                }
            } catch (IOException e1) {
                String message = "Cannot open URL: " + scriptURL;
                if (se == null) {
                    se = new ResourceException(message);
                } else {
                    se = new ResourceException(message, se);
                }
            }
        }

        // If we didn't find anything, report on all the exceptions that occurred.
        if (groovyScriptConn == null) {
            // FIX: previously this threw `se` unconditionally. With an empty
            // roots array the loop never runs, `se` stays null, and callers got
            // a bare NullPointerException instead of a meaningful error.
            if (se == null) {
                se = new ResourceException("No resource for " + resourceName + " was found");
            }
            throw se;
        }
        return groovyScriptConn;
    }

    /**
     * The groovy script engine will run groovy scripts and reload them and
     * their dependencies when they are modified. This is useful for embedding
     * groovy in other containers like games and application servers.
     *
     * @param roots an array of URLs where Groovy scripts will be stored. They should
     *              be laid out using their package structure like Java classes
     */
    public GroovyScriptEngine(URL[] roots) {
        this.roots = roots;
        this.rc = this;
    }

    public GroovyScriptEngine(URL[] roots, ClassLoader parentClassLoader) {
        this(roots);
        this.parentClassLoader = parentClassLoader;
    }

    public GroovyScriptEngine(String[] urls) throws IOException {
        roots = new URL[urls.length];
        for (int i = 0; i < roots.length; i++) {
            roots[i] = new File(urls[i]).toURL();
        }
        this.rc = this;
    }

    public GroovyScriptEngine(String[] urls, ClassLoader parentClassLoader) throws IOException {
        this(urls);
        this.parentClassLoader = parentClassLoader;
    }

    public GroovyScriptEngine(String url) throws IOException {
        roots = new URL[1];
        roots[0] = new File(url).toURL();
        this.rc = this;
    }

    public GroovyScriptEngine(String url, ClassLoader parentClassLoader) throws IOException {
        this(url);
        this.parentClassLoader = parentClassLoader;
    }

    public GroovyScriptEngine(ResourceConnector rc) {
        this.rc = rc;
    }

    public GroovyScriptEngine(ResourceConnector rc, ClassLoader parentClassLoader) {
        this(rc);
        this.parentClassLoader = parentClassLoader;
    }

    /**
     * Get the <code>ClassLoader</code> that will serve as the parent ClassLoader of the
     * {@link GroovyClassLoader} in which scripts will be executed. By default, this is the
     * ClassLoader that loaded the <code>GroovyScriptEngine</code> class.
     *
     * @return parent classloader used to load scripts
     */
    public ClassLoader getParentClassLoader() {
        return parentClassLoader;
    }

    /**
     * @param parentClassLoader ClassLoader to be used as the parent ClassLoader
     *                          for scripts executed by the engine
     */
    public void setParentClassLoader(ClassLoader parentClassLoader) {
        if (parentClassLoader == null) {
            throw new IllegalArgumentException("The parent class loader must not be null.");
        }
        this.parentClassLoader = parentClassLoader;
    }

    /**
     * Get the class of the scriptName in question, so that you can instantiate
     * Groovy objects with caching and reloading.
     *
     * @param scriptName dotted script name; converted to a path with a .groovy suffix
     * @return the loaded scriptName as a compiled class
     * @throws ResourceException if the script source cannot be located
     * @throws ScriptException if the script fails to compile
     */
    public Class loadScriptByName(String scriptName) throws ResourceException, ScriptException {
        // NOTE(review): this overload uses getClass().getClassLoader() rather than
        // getParentClassLoader() (which run() uses) — kept as-is for compatibility.
        return loadScriptByName(scriptName, getClass().getClassLoader());
    }

    /**
     * Get the class of the scriptName in question, so that you can instantiate
     * Groovy objects with caching and reloading.
     *
     * @param scriptName dotted script name; converted to a path with a .groovy suffix
     * @param parentClassLoader parent for the GroovyClassLoader that compiles the script
     * @return the loaded scriptName as a compiled class
     * @throws ResourceException if the script source cannot be located
     * @throws ScriptException if the script fails to compile
     */
    public Class loadScriptByName(String scriptName, ClassLoader parentClassLoader)
            throws ResourceException, ScriptException {
        scriptName = scriptName.replace('.', File.separatorChar) + ".groovy";
        ScriptCacheEntry entry = updateCacheEntry(scriptName, parentClassLoader);
        return entry.scriptClass;
    }

    /**
     * Locate the class and reload it or any of its dependencies.
     *
     * @param scriptName path-style script name (with .groovy suffix)
     * @param parentClassLoader parent for the compiling GroovyClassLoader
     * @return the scriptName cache entry, freshly compiled if stale
     * @throws ResourceException if the script source cannot be located
     * @throws ScriptException if the script fails to compile
     */
    private ScriptCacheEntry updateCacheEntry(String scriptName, final ClassLoader parentClassLoader)
            throws ResourceException, ScriptException {
        ScriptCacheEntry entry;

        // NOTE(review): locking on an interned String serializes compilation per
        // script name, but interned strings are globally shared lock objects —
        // fragile, though preserved here for compatibility.
        scriptName = scriptName.intern();
        synchronized (scriptName) {

            URLConnection groovyScriptConn = rc.getResourceConnection(scriptName);

            // URL last modified
            long lastModified = groovyScriptConn.getLastModified();
            // Check the cache for the scriptName
            entry = (ScriptCacheEntry) scriptCache.get(scriptName);
            // If the entry isn't null check all the dependencies
            boolean dependencyOutOfDate = false;
            if (entry != null) {
                for (Iterator i = entry.dependencies.keySet().iterator(); i.hasNext();) {
                    URLConnection urlc = null;
                    URL url = (URL) i.next();
                    try {
                        urlc = url.openConnection();
                        urlc.setDoInput(false);
                        urlc.setDoOutput(false);
                        long dependentLastModified = urlc.getLastModified();
                        if (dependentLastModified > ((Long) entry.dependencies.get(url)).longValue()) {
                            dependencyOutOfDate = true;
                            break;
                        }
                    } catch (IOException ioe) {
                        // A dependency we can no longer read counts as stale
                        dependencyOutOfDate = true;
                        break;
                    }
                }
            }

            if (entry == null || entry.lastModified < lastModified || dependencyOutOfDate) {
                // Make a new entry
                entry = new ScriptCacheEntry();

                // Closure variable
                final ScriptCacheEntry finalEntry = entry;

                // Compile the scriptName into an object. The anonymous loader records
                // every class it resolves as a dependency of this script, so later
                // calls can detect when any of them changed on disk.
                GroovyClassLoader groovyLoader =
                        (GroovyClassLoader) AccessController.doPrivileged(new PrivilegedAction() {
                            public Object run() {
                                return new GroovyClassLoader(parentClassLoader) {
                                    protected Class findClass(String className) throws ClassNotFoundException {
                                        String filename = className.replace('.', File.separatorChar) + ".groovy";
                                        URLConnection dependentScriptConn = null;
                                        try {
                                            dependentScriptConn = rc.getResourceConnection(filename);
                                            finalEntry.dependencies.put(
                                                    dependentScriptConn.getURL(),
                                                    new Long(dependentScriptConn.getLastModified()));
                                        } catch (ResourceException e1) {
                                            throw new ClassNotFoundException("Could not read " + className + ": " + e1);
                                        }
                                        try {
                                            return parseClass(dependentScriptConn.getInputStream(), filename);
                                        } catch (CompilationFailedException e2) {
                                            throw new ClassNotFoundException("Syntax error in " + className + ": " + e2);
                                        } catch (IOException e2) {
                                            throw new ClassNotFoundException("Problem reading " + className + ": " + e2);
                                        }
                                    }
                                };
                            }
                        });

                try {
                    entry.scriptClass = groovyLoader.parseClass(groovyScriptConn.getInputStream(), scriptName);
                } catch (Exception e) {
                    throw new ScriptException("Could not parse scriptName: " + scriptName, e);
                }
                entry.lastModified = lastModified;
                scriptCache.put(scriptName, entry);
            }
        }
        return entry;
    }

    /**
     * Run a script identified by name.
     *
     * @param scriptName name of the script to run
     * @param argument a single argument passed as a variable named <code>arg</code> in the binding
     * @return a <code>toString()</code> representation of the result of the execution of the script
     * @throws ResourceException if the script source cannot be located
     * @throws ScriptException if the script fails to compile
     */
    public String run(String scriptName, String argument) throws ResourceException, ScriptException {
        Binding binding = new Binding();
        binding.setVariable("arg", argument);
        Object result = run(scriptName, binding);
        return result == null ? "" : result.toString();
    }

    /**
     * Run a script identified by name.
     *
     * @param scriptName name of the script to run
     * @param binding binding to pass to the script
     * @return the script's result object (may be null)
     * @throws ResourceException if the script source cannot be located
     * @throws ScriptException if the script fails to compile
     */
    public Object run(String scriptName, Binding binding) throws ResourceException, ScriptException {
        ScriptCacheEntry entry = updateCacheEntry(scriptName, getParentClassLoader());
        Script scriptObject = InvokerHelper.createScript(entry.scriptClass, binding);
        return scriptObject.run();
    }
}
package net.i2p.router.peermanager; import java.io.IOException; import java.io.OutputStream; import java.io.Writer; import java.net.InetAddress; import java.net.UnknownHostException; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantReadWriteLock; import net.i2p.data.Hash; import net.i2p.data.RouterAddress; import net.i2p.data.RouterInfo; import net.i2p.router.NetworkDatabaseFacade; import net.i2p.router.RouterContext; import net.i2p.router.tunnel.pool.TunnelPeerSelector; import net.i2p.stat.Rate; import net.i2p.stat.RateStat; import net.i2p.util.Log; /** * Keep the peer profiles organized according to the tiered model. This does not * actively update anything - the reorganize() method should be called periodically * to recalculate thresholds and move profiles into the appropriate tiers, and addProfile() * should be used to add new profiles (placing them into the appropriate groupings). 
*/
public class ProfileOrganizer {
    private Log _log;
    private RouterContext _context;
    /** H(routerIdentity) to PeerProfile for all peers that are fast and high capacity */
    private Map<Hash, PeerProfile> _fastPeers;
    /** H(routerIdentity) to PeerProfile for all peers that have high capacities */
    private Map<Hash, PeerProfile> _highCapacityPeers;
    /** H(routerIdentity) to PeerProfile for all peers that are well integrated into the network and not failing horribly */
    private Map<Hash, PeerProfile> _wellIntegratedPeers;
    /** H(routerIdentity) to PeerProfile for all peers that are not failing horribly */
    private Map<Hash, PeerProfile> _notFailingPeers;
    /** H(routerIdentity), containing elements in _notFailingPeers */
    private List<Hash> _notFailingPeersList;
    /** H(routerIdentity) to PeerProfile for all peers that ARE failing horribly (but that we haven't dropped reference to yet) */
    private Map<Hash, PeerProfile> _failingPeers;
    /** who are we? */
    private Hash _us;
    private ProfilePersistenceHelper _persistenceHelper;
    /** PeerProfile objects for all peers profiled, ordered by the ones with the highest capacity first */
    private Set<PeerProfile> _strictCapacityOrder;
    /** threshold speed value, separating fast from slow */
    private double _thresholdSpeedValue;
    /** threshold reliability value, separating reliable from unreliable */
    private double _thresholdCapacityValue;
    /** integration value, separating well integrated from not well integrated */
    private double _thresholdIntegrationValue;
    private InverseCapacityComparator _comp;

    /**
     * Defines the minimum number of 'fast' peers that the organizer should select. See
     * {@link ProfileOrganizer#getMinimumFastPeers}
     *
     */
    public static final String PROP_MINIMUM_FAST_PEERS = "profileOrganizer.minFastPeers";
    public static final int DEFAULT_MINIMUM_FAST_PEERS = 8;
    /** this is misnamed, it is really the max minimum number. */
    private static final int DEFAULT_MAXIMUM_FAST_PEERS = 16;

    /**
     * Defines the minimum number of 'high capacity' peers that the organizer should
     * select when using the mean - if less than this many are available, select the
     * capacity by the median.
     *
     */
    public static final String PROP_MINIMUM_HIGH_CAPACITY_PEERS = "profileOrganizer.minHighCapacityPeers";
    public static final int DEFAULT_MINIMUM_HIGH_CAPACITY_PEERS = 10;

    /** synchronized against this lock when updating the tier that peers are located in (and when fetching them from a peer) */
    private final ReentrantReadWriteLock _reorganizeLock = new ReentrantReadWriteLock(true);

    /** incredibly weak PRNG, just used for shuffling peers. no need to waste the real PRNG on this */
    private Random _random = new Random();

    /** Set up the tier maps, thresholds and the rate stats used to time reorganization. */
    public ProfileOrganizer(RouterContext context) {
        _context = context;
        _log = context.logManager().getLog(ProfileOrganizer.class);
        _comp = new InverseCapacityComparator();
        _fastPeers = new HashMap(16);
        _highCapacityPeers = new HashMap(32);
        _wellIntegratedPeers = new HashMap(16);
        _notFailingPeers = new HashMap(256);
        _notFailingPeersList = new ArrayList(256);
        _failingPeers = new HashMap(16);
        _strictCapacityOrder = new TreeSet(_comp);
        _thresholdSpeedValue = 0.0d;
        _thresholdCapacityValue = 0.0d;
        _thresholdIntegrationValue = 0.0d;
        _persistenceHelper = new ProfilePersistenceHelper(_context);
        _context.statManager().createRateStat("peer.profileSortTime", "How long the reorg takes sorting peers", "Peers", new long[] { 10*60*1000 });
        _context.statManager().createRateStat("peer.profileCoalesceTime", "How long the reorg takes coalescing peer stats", "Peers", new long[] { 10*60*1000 });
        _context.statManager().createRateStat("peer.profileThresholdTime", "How long the reorg takes determining the tier thresholds", "Peers", new long[] { 10*60*1000 });
        _context.statManager().createRateStat("peer.profilePlaceTime", "How long the reorg takes placing peers in the tiers", "Peers", new long[] { 10*60*1000 });
        _context.statManager().createRateStat("peer.profileReorgTime", "How long the reorg takes overall", "Peers", new long[] { 10*60*1000 });
        // used in DBHistory
        _context.statManager().createRateStat("peer.failedLookupRate", "DB Lookup fail rate", "Peers", new long[] { 10*60*1000l, 60*60*1000l, 24*60*60*1000l });
    }

    private void getReadLock() { _reorganizeLock.readLock().lock(); }
    private void releaseReadLock() { _reorganizeLock.readLock().unlock(); }

    /** @return true if the lock was acquired */
    private boolean getWriteLock() {
        try {
            // bounded wait so a stuck reorg cannot deadlock callers forever
            boolean rv = _reorganizeLock.writeLock().tryLock(3000, TimeUnit.MILLISECONDS);
            if ((!rv) && _log.shouldLog(Log.WARN))
                _log.warn("no lock, size is: " + _reorganizeLock.getQueueLength(), new Exception("rats"));
            return rv;
        } catch (InterruptedException ie) {}
        // NOTE(review): the interrupt status is swallowed here; consider
        // Thread.currentThread().interrupt() before returning false.
        return false;
    }

    private void releaseWriteLock() { _reorganizeLock.writeLock().unlock(); }

    public void setUs(Hash us) { _us = us; }
    public Hash getUs() { return _us; }

    public double getSpeedThreshold() { return _thresholdSpeedValue; }
    public double getCapacityThreshold() { return _thresholdCapacityValue; }
    public double getIntegrationThreshold() { return _thresholdIntegrationValue; }

    /**
     * Retrieve the profile for the given peer, if one exists (else null)
     *
     */
    public PeerProfile getProfile(Hash peer) {
        getReadLock();
        try {
            return locked_getProfile(peer);
        } finally { releaseReadLock(); }
    }

    /**
     * Add the new profile, returning the old value (or null if no profile existed)
     *
     */
    public PeerProfile addProfile(PeerProfile profile) {
        if ( (profile == null) || (profile.getPeer() == null) ) return null;
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("New profile created for " + profile.getPeer().toBase64());
        PeerProfile old = getProfile(profile.getPeer());
        profile.coalesceStats();
        // if the write lock can't be had within the timeout, skip placement
        if (!getWriteLock())
            return old;
        try {
            locked_placeProfile(profile);
            _strictCapacityOrder.add(profile);
        } finally { releaseWriteLock(); }
        return old;
    }

    /** size of the given tier map, taken under the read lock */
    private int count(Map m) {
        getReadLock();
        try { return m.size(); } finally { releaseReadLock(); }
    }

    public int countFastPeers() { return count(_fastPeers); }
    public int countHighCapacityPeers() { return count(_highCapacityPeers); }
    public int countWellIntegratedPeers() { return count(_wellIntegratedPeers); }
    public int countNotFailingPeers() { return count(_notFailingPeers); }
    public int countFailingPeers() { return count(_failingPeers); }

    /** number of peers (failing or not) we have heard from / sent to in the last 6 hours */
    public int countActivePeers() {
        int activePeers = 0;
        long hideBefore = _context.clock().now() - 6*60*60*1000;
        getReadLock();
        try {
            for (Iterator<PeerProfile> iter = _failingPeers.values().iterator(); iter.hasNext(); ) {
                PeerProfile profile = iter.next();
                if (profile.getLastSendSuccessful() >= hideBefore)
                    activePeers++;
                else if (profile.getLastHeardFrom() >= hideBefore)
                    activePeers++;
            }
            for (Iterator<PeerProfile> iter = _notFailingPeers.values().iterator(); iter.hasNext(); ) {
                PeerProfile profile = iter.next();
                if (profile.getLastSendSuccessful() >= hideBefore)
                    activePeers++;
                else if (profile.getLastHeardFrom() >= hideBefore)
                    activePeers++;
            }
        } finally { releaseReadLock(); }
        return activePeers;
    }

    /** does the given tier map contain the peer, taken under the read lock */
    private boolean isX(Map m, Hash peer) {
        getReadLock();
        try { return m.containsKey(peer); } finally { releaseReadLock(); }
    }

    public boolean isFast(Hash peer) { return isX(_fastPeers, peer); }
    public boolean isHighCapacity(Hash peer) { return isX(_highCapacityPeers, peer); }
    public boolean isWellIntegrated(Hash peer) { return isX(_wellIntegratedPeers, peer); }
    public boolean isFailing(Hash peer) { return isX(_failingPeers, peer); }

    /**
     * if a peer sends us more than 5 replies in a searchReply that we cannot
     * fetch, stop listening to them.
     *
     */
    private final static int MAX_BAD_REPLIES_PER_HOUR = 5;

    /**
     * Does the given peer send us bad replies - either invalid store messages
     * (expired, corrupt, etc) or unreachable replies (pointing towards routers
     * that don't exist).
     *
     */
    public boolean peerSendsBadReplies(Hash peer) {
        PeerProfile profile = getProfile(peer);
        if (profile != null && profile.getIsExpandedDB()) {
            RateStat invalidReplyRateStat = profile.getDBHistory().getInvalidReplyRate();
            Rate invalidReplyRate = invalidReplyRateStat.getRate(30*60*1000l);
            if ( (invalidReplyRate.getCurrentTotalValue() > MAX_BAD_REPLIES_PER_HOUR) ||
                 (invalidReplyRate.getLastTotalValue() > MAX_BAD_REPLIES_PER_HOUR) ) {
                return true;
            }
        }
        return false;
    }

    /** write the given peer's profile to the stream, if we have one */
    public void exportProfile(Hash profile, OutputStream out) throws IOException {
        PeerProfile prof = getProfile(profile);
        if (prof != null)
            _persistenceHelper.writeProfile(prof, out);
    }

    /**
     * Return a set of Hashes for peers that are both fast and reliable.  If an insufficient
     * number of peers are both fast and reliable, fall back onto high capacity peers, and if that
     * doesn't contain sufficient peers, fall back onto not failing peers, and even THAT doesn't
     * have sufficient peers, fall back onto failing peers.
     *
     * @param howMany how many peers are desired
     * @param exclude set of Hashes for routers that we don't want selected
     * @param matches set to store the return value in
     *
     */
    public void selectFastPeers(int howMany, Set exclude, Set matches) {
        selectFastPeers(howMany, exclude, matches, 0);
    }

    public void selectFastPeers(int howMany, Set exclude, Set matches, int mask) {
        getReadLock();
        try {
            locked_selectPeers(_fastPeers, howMany, exclude, matches, mask);
        } finally { releaseReadLock(); }
        // cascade to the next (larger, slower) tier if the fast tier was too small
        if (matches.size() < howMany) {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectFastPeers("+howMany+"), not enough fast (" + matches.size() + ") going on to highCap");
            selectHighCapacityPeers(howMany, exclude, matches, mask);
        } else {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectFastPeers("+howMany+"), found enough fast (" + matches.size() + ")");
        }
        return;
    }

    /**
     * Return a set of Hashes for peers that have a high capacity
     *
     */
    public void selectHighCapacityPeers(int howMany, Set exclude, Set matches) {
        selectHighCapacityPeers(howMany, exclude, matches, 0);
    }

    public void selectHighCapacityPeers(int howMany, Set exclude, Set matches, int mask) {
        getReadLock();
        try {
            // we only use selectHighCapacityPeers when we are selecting for PURPOSE_TEST
            // or we are falling back due to _fastPeers being too small, so we can always
            // exclude the fast peers
            /*
            if (exclude == null)
                exclude = new HashSet(_fastPeers.keySet());
            else
                exclude.addAll(_fastPeers.keySet());
            */
            locked_selectPeers(_highCapacityPeers, howMany, exclude, matches, mask);
        } finally { releaseReadLock(); }
        if (matches.size() < howMany) {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectHighCap("+howMany+"), not enough highcap (" + matches.size() + ") going on to ANFP2");
            selectActiveNotFailingPeers2(howMany, exclude, matches, mask);
        } else {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectHighCap("+howMany+"), found enough highCap (" + matches.size() + ")");
        }
        return;
    }

    /**
     * Return a set of Hashes for peers that are well integrated into the network.
     *
     */
    public void selectWellIntegratedPeers(int howMany, Set exclude, Set matches) {
        selectWellIntegratedPeers(howMany, exclude, matches, 0);
    }

    public void selectWellIntegratedPeers(int howMany, Set exclude, Set matches, int mask) {
        getReadLock();
        try {
            locked_selectPeers(_wellIntegratedPeers, howMany, exclude, matches, mask);
        } finally { releaseReadLock(); }
        if (matches.size() < howMany) {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectWellIntegrated("+howMany+"), not enough integrated (" + matches.size() + ") going on to notFailing");
            selectNotFailingPeers(howMany, exclude, matches, mask);
        } else {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectWellIntegrated("+howMany+"), found enough well integrated (" + matches.size() + ")");
        }
        return;
    }

    /**
     * Return a set of Hashes for peers that are not failing, preferring ones that
     * we are already talking with
     *
     */
    public void selectNotFailingPeers(int howMany, Set exclude, Set matches) {
        selectNotFailingPeers(howMany, exclude, matches, false, 0);
    }

    public void selectNotFailingPeers(int howMany, Set exclude, Set matches, int mask) {
        selectNotFailingPeers(howMany, exclude, matches, false, mask);
    }

    public void selectNotFailingPeers(int howMany, Set exclude, Set matches, boolean onlyNotFailing) {
        selectNotFailingPeers(howMany, exclude, matches, onlyNotFailing, 0);
    }

    /**
     * Return a set of Hashes for peers that are not failing, preferring ones that
     * we are already talking with
     *
     * @param howMany how many peers to find
     * @param exclude what peers to skip (may be null)
     * @param matches set to store the matches in
     * @param onlyNotFailing if true, don't include any high capacity peers
     */
    public void selectNotFailingPeers(int howMany, Set exclude, Set matches, boolean onlyNotFailing, int mask) {
        if (matches.size() < howMany)
            selectAllNotFailingPeers(howMany, exclude, matches, onlyNotFailing, mask);
        return;
    }

    /**
     * Return a set of Hashes for peers that are both not failing and we're actively
     * talking with.
     *
     * We use commSystem().isEstablished(), not profile.getIsActive(), as the
     * NTCP idle time is now shorter than the 5 minute getIsActive() threshold,
     * and we're using this to try and limit connections.
     *
     * Caution, this does NOT cascade further to non-connected peers, so it should only
     * be used when there is a good number of connected peers.
     *
     * @param exclude non-null
     * No mask parameter, to be fixed
     */
    public void selectActiveNotFailingPeers(int howMany, Set exclude, Set matches) {
        if (matches.size() < howMany) {
            getReadLock();
            try {
                // exclude (by mutating the caller's set) every peer we are not connected to
                for (Iterator<Hash> iter = _notFailingPeers.keySet().iterator(); iter.hasNext(); ) {
                    Hash peer = iter.next();
                    if (!_context.commSystem().isEstablished(peer))
                        exclude.add(peer);
                }
                locked_selectPeers(_notFailingPeers, howMany, exclude, matches, 0);
            } finally { releaseReadLock(); }
        }
    }

    /**
     * Return a set of Hashes for peers that are both not failing and we're actively
     * talking with.
     *
     * We use commSystem().isEstablished(), not profile.getIsActive(), as the
     * NTCP idle time is now shorter than the 5 minute getIsActive() threshold,
     * and we're using this to try and limit connections.
     *
     * This DOES cascade further to non-connected peers.
     */
    private void selectActiveNotFailingPeers2(int howMany, Set exclude, Set matches, int mask) {
        if (matches.size() < howMany) {
            // build a snapshot of only the connected not-failing peers, then select from it
            Map<Hash, PeerProfile> activePeers = new HashMap();
            getReadLock();
            try {
                for (Iterator<Map.Entry<Hash, PeerProfile>> iter = _notFailingPeers.entrySet().iterator(); iter.hasNext(); ) {
                    Map.Entry<Hash, PeerProfile> e = iter.next();
                    if (_context.commSystem().isEstablished(e.getKey()))
                        activePeers.put(e.getKey(), e.getValue());
                }
                locked_selectPeers(activePeers, howMany, exclude, matches, mask);
            } finally { releaseReadLock(); }
        }
        if (matches.size() < howMany) {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectANFP2("+howMany+"), not enough ANFP (" + matches.size() + ") going on to notFailing");
            selectNotFailingPeers(howMany, exclude, matches, mask);
        } else {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectANFP2("+howMany+"), found enough ANFP (" + matches.size() + ")");
        }
    }

    /**
     * Return a set of Hashes for peers that are not failing.
     *
     */
    public void selectAllNotFailingPeers(int howMany, Set exclude, Set matches, boolean onlyNotFailing) {
        selectAllNotFailingPeers(howMany, exclude, matches, onlyNotFailing, 0);
    }

    /**
     * @param mask ignored, should call locked_selectPeers, to be fixed
     *
     */
    private void selectAllNotFailingPeers(int howMany, Set exclude, Set matches, boolean onlyNotFailing, int mask) {
        if (matches.size() < howMany) {
            int orig = matches.size();
            int needed = howMany - orig;
            int start = 0;
            List selected = new ArrayList(needed);
            getReadLock();
            try {
                // we randomize the whole list when rebuilding it, but randomizing
                // the entire list on each peer selection is a bit crazy
                // NOTE(review): Random.nextInt(0) throws IllegalArgumentException —
                // confirm _notFailingPeersList can never be empty when this runs.
                start = _context.random().nextInt(_notFailingPeersList.size());
                for (int i = 0; i < _notFailingPeersList.size() && selected.size() < needed; i++) {
                    // walk the list circularly from the random starting point
                    int curIndex = (i+start) % _notFailingPeersList.size();
                    Hash cur = (Hash)_notFailingPeersList.get(curIndex);
                    if (matches.contains(cur) ||
                        (exclude != null && exclude.contains(cur))) {
                        if (_log.shouldLog(Log.DEBUG))
                            _log.debug("matched? " + matches.contains(cur) + " exclude: " + exclude + " cur=" + cur.toBase64());
                        continue;
                    } else if (onlyNotFailing && _highCapacityPeers.containsKey(cur)) {
                        // we dont want the good peers, just random ones
                        continue;
                    } else {
                        if (isSelectable(cur))
                            selected.add(cur);
                        else if (_log.shouldLog(Log.DEBUG))
                            _log.debug("Not selectable: " + cur.toBase64());
                    }
                }
            } finally { releaseReadLock(); }
            if (_log.shouldLog(Log.INFO))
                _log.info("Selecting all not failing (strict? " + onlyNotFailing + " start=" + start
                          + ") found " + selected.size() + " new peers: " + selected + " all=" + _notFailingPeersList.size() + " strict=" + _strictCapacityOrder.size());
            matches.addAll(selected);
        }
        if (matches.size() < howMany) {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectAllNotFailing("+howMany+"), not enough (" + matches.size() + ") going on to failing");
            selectFailingPeers(howMany, exclude, matches);
        } else {
            if (_log.shouldLog(Log.INFO))
                _log.info("selectAllNotFailing("+howMany+"), enough (" + matches.size() + ")");
        }
        return;
    }

    /**
     * I'm not quite sure why you'd want this... (other than for failover from the better results)
     *
     */
    public void selectFailingPeers(int howMany, Set exclude, Set matches) {
        getReadLock();
        try {
            locked_selectPeers(_failingPeers, howMany, exclude, matches);
        } finally { releaseReadLock(); }
        return;
    }

    /**
     * Get the peers the transport layer thinks are unreachable, and
     * add in the peers with the SSU peer testing bug,
     * and peers requiring introducers.
     *
     */
    public List<Hash> selectPeersLocallyUnreachable() {
        List<Hash> n;
        int count;
        getReadLock();
        try {
            count = _notFailingPeers.size();
            n = new ArrayList(_notFailingPeers.keySet());
        } finally { releaseReadLock(); }
        List<Hash> l = new ArrayList(count / 4);
        for (Iterator<Hash> iter = n.iterator(); iter.hasNext(); ) {
            Hash peer = iter.next();
            if (_context.commSystem().wasUnreachable(peer))
                l.add(peer);
            else {
                // Blacklist <= 0.6.1.32 SSU-only peers, they don't know if they are unreachable,
                // and we may not know either if they contacted us first, so assume they are.
                // Also blacklist all peers requiring SSU introducers, because either
                //  a) it's slow; or
                //  b) it doesn't work very often; or
                //  c) in the event they are advertising NTCP, it probably won't work because
                //     they probably don't have a TCP hole punched in their firewall either.
                RouterInfo info = _context.netDb().lookupRouterInfoLocally(peer);
                if (info != null) {
                    String v = info.getOption("router.version");
                    // this only works if there is no 0.6.1.34!
                    if (v != null && (!v.equals("0.6.1.33")) &&
                        v.startsWith("0.6.1.") && info.getTargetAddress("NTCP") == null)
                        l.add(peer);
                    else {
                        RouterAddress ra = info.getTargetAddress("SSU");
                        // peers with no SSU address at all are fine.
                        // as long as they have NTCP
                        if (ra == null) {
                            if (info.getTargetAddress("NTCP") == null)
                                l.add(peer);
                            continue;
                        }
                        // This is the quick way of doing UDPAddress.getIntroducerCount() > 0
                        Properties props = ra.getOptions();
                        if (props != null && props.getProperty("ihost0") != null)
                            l.add(peer);
                    }
                }
            }
        }
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("Unreachable: " + l);
        return l;
    }

    /**
     * Get the peers that have recently rejected us for bandwidth
     * recent == last 20s
     *
     */
    public List selectPeersRecentlyRejecting() {
        getReadLock();
        try {
            long cutoff = _context.clock().now() - (20*1000);
            int count = _notFailingPeers.size();
            List l = new ArrayList(count / 128);
            for (Iterator<PeerProfile> iter = _notFailingPeers.values().iterator(); iter.hasNext(); ) {
                PeerProfile prof = iter.next();
                if (prof.getTunnelHistory().getLastRejectedBandwidth() > cutoff)
                    l.add(prof.getPeer());
            }
            return l;
        } finally { releaseReadLock(); }
    }

    /**
     * Find the hashes for all peers we are actively profiling
     *
     */
    public Set selectAllPeers() {
        getReadLock();
        try {
            Set allPeers = new HashSet(_failingPeers.size() + _notFailingPeers.size() + _highCapacityPeers.size() + _fastPeers.size());
            allPeers.addAll(_failingPeers.keySet());
            allPeers.addAll(_notFailingPeers.keySet());
            allPeers.addAll(_highCapacityPeers.keySet());
            allPeers.addAll(_fastPeers.keySet());
            return allPeers;
        } finally { releaseReadLock(); }
    }

    // bounds and step for the dynamically-adjusted profile expiration window
    private static final long MIN_EXPIRE_TIME = 3*60*60*1000;
    private static final long MAX_EXPIRE_TIME = 6*60*60*1000;
    private static final long ADJUST_EXPIRE_TIME = 60*1000;
    private static final int ENOUGH_PROFILES = 600;
    private long _currentExpireTime = MAX_EXPIRE_TIME;

    /**
     * Place peers into the correct tier, as well as expand/contract and even drop profiles
     * according to whatever limits are in place.  Peer profiles are not coalesced during
     * this method, but the averages are recalculated.
 * */
public void reorganize() { reorganize(false); }

public void reorganize(boolean shouldCoalesce) {
    long sortTime = 0;
    int coalesceTime = 0;
    long thresholdTime = 0;
    long placeTime = 0;
    int profileCount = 0;

    long uptime = _context.router().getUptime();
    long expireOlderThan = -1;
    if (uptime > 60*60*1000) {
        // dynamically adjust expire time to control memory usage
        if (countNotFailingPeers() > ENOUGH_PROFILES)
            _currentExpireTime = Math.max(_currentExpireTime - ADJUST_EXPIRE_TIME, MIN_EXPIRE_TIME);
        else
            _currentExpireTime = Math.min(_currentExpireTime + ADJUST_EXPIRE_TIME, MAX_EXPIRE_TIME);
        // drop profiles that we haven't spoken to in a while
        expireOlderThan = _context.clock().now() - _currentExpireTime;
    }

    // NOTE(review): getWriteLock() returns a boolean, so it appears to be a
    // fail-able/timed acquire; on failure this pass is skipped entirely rather
    // than blocking - confirm against its definition.
    if (!getWriteLock())
        return;
    long start = System.currentTimeMillis();
    try {
        Set<PeerProfile> allPeers = _strictCapacityOrder; //new HashSet(_failingPeers.size() + _notFailingPeers.size() + _highCapacityPeers.size() + _fastPeers.size());
        //allPeers.addAll(_failingPeers.values());
        //allPeers.addAll(_notFailingPeers.values());
        //allPeers.addAll(_highCapacityPeers.values());
        //allPeers.addAll(_fastPeers.values());

        // Rebuild the strict capacity ordering, expiring stale profiles and
        // optionally coalescing each profile's stats while we hold the lock.
        Set<PeerProfile> reordered = new TreeSet(_comp);
        long sortStart = System.currentTimeMillis();
        for (Iterator<PeerProfile> iter = _strictCapacityOrder.iterator(); iter.hasNext(); ) {
            PeerProfile prof = iter.next();
            if ( (expireOlderThan > 0) && (prof.getLastSendSuccessful() <= expireOlderThan) )
                continue; // drop, but no need to delete, since we don't periodically reread

            if (shouldCoalesce) {
                long coalesceStart = System.currentTimeMillis();
                prof.coalesceStats();
                coalesceTime += (int)(System.currentTimeMillis()-coalesceStart);
            }

            reordered.add(prof);
            profileCount++;
        }
        sortTime = System.currentTimeMillis() - sortStart;
        _strictCapacityOrder = reordered;

        long thresholdStart = System.currentTimeMillis();
        locked_calculateThresholds(allPeers);
        thresholdTime = System.currentTimeMillis()-thresholdStart;

        // Empty every tier, then re-place each profile against the fresh thresholds.
        _failingPeers.clear();
        _fastPeers.clear();
        _highCapacityPeers.clear();
        _notFailingPeers.clear();
        _notFailingPeersList.clear();
        _wellIntegratedPeers.clear();

        long placeStart = System.currentTimeMillis();

        for (Iterator<PeerProfile> iter = allPeers.iterator(); iter.hasNext(); ) {
            PeerProfile profile = iter.next();
            locked_placeProfile(profile);
        }

        // Post-placement adjustments: enforce the minimums/maximums on each tier.
        locked_unfailAsNecessary();
        locked_demoteHighCapAsNecessary();
        locked_promoteFastAsNecessary();
        locked_demoteFastAsNecessary();

        // Shuffle so selectors that walk this list don't always favor the same peers.
        Collections.shuffle(_notFailingPeersList, _context.random());
        placeTime = System.currentTimeMillis()-placeStart;
    } finally {
        releaseWriteLock();
    }

    if (_log.shouldLog(Log.INFO))
        _log.info("Profiles reorganized. averages: [integration: " + _thresholdIntegrationValue
                  + ", capacity: " + _thresholdCapacityValue + ", speed: " + _thresholdSpeedValue + "]");

    long total = System.currentTimeMillis()-start;
    _context.statManager().addRateData("peer.profileSortTime", sortTime, profileCount);
    _context.statManager().addRateData("peer.profileCoalesceTime", coalesceTime, profileCount);
    _context.statManager().addRateData("peer.profileThresholdTime", thresholdTime, profileCount);
    _context.statManager().addRateData("peer.profilePlaceTime", placeTime, profileCount);
    _context.statManager().addRateData("peer.profileReorgTime", total, profileCount);
}

/**
 * As with locked_unfailAsNecessary, I'm not sure how much I like this - if there
 * aren't enough fast peers, move some of the not-so-fast peers into the fast group.
 * This picks the not-so-fast peers based on capacity, not speed, and skips over any
 * failing peers.  Perhaps it should build a seperate strict ordering by speed?  Nah, not
 * worth the maintenance and memory overhead, at least not for now.
* */ private void locked_promoteFastAsNecessary() { int minFastPeers = getMinimumFastPeers(); int numToPromote = minFastPeers - _fastPeers.size(); if (numToPromote > 0) { if (_log.shouldLog(Log.INFO)) _log.info("Need to explicitly promote " + numToPromote + " peers to the fast group"); for (Iterator<PeerProfile> iter = _strictCapacityOrder.iterator(); iter.hasNext(); ) { PeerProfile cur = iter.next(); if ( (!_fastPeers.containsKey(cur.getPeer())) && (!cur.getIsFailing()) ) { if (!isSelectable(cur.getPeer())) { // skip peers we dont have in the netDb // if (_log.shouldLog(Log.INFO)) // _log.info("skip unknown peer from fast promotion: " + cur.getPeer().toBase64()); continue; } if (!cur.getIsActive()) { // skip inactive // if (_log.shouldLog(Log.INFO)) // _log.info("skip inactive peer from fast promotion: " + cur.getPeer().toBase64()); continue; } if (_log.shouldLog(Log.INFO)) _log.info("Fast promoting: " + cur.getPeer().toBase64()); _fastPeers.put(cur.getPeer(), cur); // no need to remove it from any of the other groups, since if it is // fast, it has a high capacity, and it is not failing numToPromote if (numToPromote <= 0) break; } } } return; } /** * We want to put a cap on the fast pool, to use only a small set of routers * for client tunnels for anonymity reasons. Also, unless we use only a small * number, we don't really find out who the fast ones are. 
* @since 0.7.10 */ private void locked_demoteFastAsNecessary() { int maxFastPeers = getMaximumFastPeers(); int numToDemote = _fastPeers.size() - maxFastPeers; if (numToDemote > 0) { if (_log.shouldLog(Log.INFO)) _log.info("Need to explicitly demote " + numToDemote + " peers from the fast group"); // sort by speed, slowest-first Set<PeerProfile> sorted = new TreeSet(new SpeedComparator()); sorted.addAll(_fastPeers.values()); Iterator<PeerProfile> iter = sorted.iterator(); for (int i = 0; i < numToDemote; i++) { _fastPeers.remove(iter.next().getPeer()); } } } /** * We want to put a limit on the high cap pool, to use only a small set of routers * for expl. tunnels for anonymity reasons. Also, unless we use only a small * number, we don't really find out who the high capacity ones are. * @since 0.7.11 */ private void locked_demoteHighCapAsNecessary() { int maxHighCapPeers = getMaximumHighCapPeers(); int numToDemote = _highCapacityPeers.size() - maxHighCapPeers; if (numToDemote > 0) { if (_log.shouldLog(Log.INFO)) _log.info("Need to explicitly demote " + numToDemote + " peers from the high cap group"); // sorted by capacity, highest-first Iterator<PeerProfile> iter = _strictCapacityOrder.iterator(); for (int i = 0; iter.hasNext() && i < maxHighCapPeers; ) { if (_highCapacityPeers.containsKey(iter.next().getPeer())) i++; } for (int i = 0; iter.hasNext() && i < numToDemote; ) { Hash h = iter.next().getPeer(); if (_highCapacityPeers.remove(h) != null) { _fastPeers.remove(h); i++; } } } } /** how many not failing/active peers must we have? 
*/ private final static int MIN_NOT_FAILING_ACTIVE = 3; /** * I'm not sure how much I dislike the following - if there aren't enough * active and not-failing peers, pick the most reliable active peers and * override their 'failing' flag, resorting them into the not-failing buckets * */ private void locked_unfailAsNecessary() { int notFailingActive = 0; for (Iterator<Hash> iter = _notFailingPeers.keySet().iterator(); iter.hasNext(); ) { Hash key = iter.next(); PeerProfile peer = _notFailingPeers.get(key); if (peer.getIsActive()) notFailingActive++; if (notFailingActive >= MIN_NOT_FAILING_ACTIVE) { // we've got enough, no need to try further return; } } // we dont have enough, lets unfail our best ones remaining int needToUnfail = MIN_NOT_FAILING_ACTIVE - notFailingActive; if (needToUnfail > 0) { int unfailed = 0; for (Iterator<PeerProfile> iter = _strictCapacityOrder.iterator(); iter.hasNext(); ) { PeerProfile best = iter.next(); if ( (best.getIsActive()) && (best.getIsFailing()) ) { if (_log.shouldLog(Log.WARN)) _log.warn("All peers were failing, so we have overridden the failing flag for one of the most reliable active peers (" + best.getPeer().toBase64() + ")"); best.setIsFailing(false); locked_placeProfile(best); unfailed++; } if (unfailed >= needToUnfail) break; } } } // no more public stuff below /** * Update the thresholds based on the profiles in this set. currently * implements the capacity threshold based on the mean capacity of active * and nonfailing peers (falling back on the median if that results in too * few peers. We then use the median speed from that group to define the * speed threshold, and use the mean integration value from the * high capacity group to define the integration threshold. 
 * */
private void locked_calculateThresholds(Set allPeers) {
    double totalCapacity = 0;
    double totalIntegration = 0;
    // Re-sort (by capacity) only the peers that count toward the thresholds:
    // active, not failing, and not ourselves.
    Set reordered = new TreeSet(_comp);
    for (Iterator<PeerProfile> iter = allPeers.iterator(); iter.hasNext(); ) {
        PeerProfile profile = iter.next();

        if (_us.equals(profile.getPeer())) continue;

        // only take into account active peers that aren't failing
        if (profile.getIsFailing() || (!profile.getIsActive()))
            continue;

        // dont bother trying to make sense of things below the baseline
        // otoh, keep them in the threshold calculation, so we can adapt
        ////if (profile.getCapacityValue() <= CapacityCalculator.GROWTH_FACTOR)
        ////    continue;

        totalCapacity += profile.getCapacityValue();
        totalIntegration += profile.getIntegrationValue();
        reordered.add(profile);
    }

    locked_calculateCapacityThreshold(totalCapacity, reordered);
    locked_calculateSpeedThreshold(reordered);

    if (totalIntegration > 0)
        _thresholdIntegrationValue = 1.0d * avg(totalIntegration, reordered.size());
    else
        // Make nobody rather than everybody well-integrated
        _thresholdIntegrationValue = 1.0d;
}

/**
 * Update the _thresholdCapacityValue by using a few simple formulas run
 * against the specified peers.  Ideally, we set the threshold capacity to
 * the mean, as long as that gives us enough peers and is greater than the
 * median.
* * @param reordered ordered set of PeerProfile objects, ordered by capacity * (highest first) for active nonfailing peers whose * capacity is greater than the growth factor */ private void locked_calculateCapacityThreshold(double totalCapacity, Set reordered) { int numNotFailing = reordered.size(); double meanCapacity = avg(totalCapacity, numNotFailing); int minHighCapacityPeers = getMinimumHighCapacityPeers(); int numExceedingMean = 0; double thresholdAtMedian = 0; double thresholdAtMinHighCap = 0; double thresholdAtLowest = CapacityCalculator.GROWTH_FACTOR; int cur = 0; for (Iterator<PeerProfile> iter = reordered.iterator(); iter.hasNext(); ) { PeerProfile profile = iter.next(); double val = profile.getCapacityValue(); if (val > meanCapacity) numExceedingMean++; if (cur == reordered.size()/2) thresholdAtMedian = val; if (cur == minHighCapacityPeers - 1) thresholdAtMinHighCap = val; if (cur == reordered.size() -1) thresholdAtLowest = val; cur++; } if (numExceedingMean >= minHighCapacityPeers) { // our average is doing well (growing, not recovering from failures) if (_log.shouldLog(Log.INFO)) _log.info("Our average capacity is doing well [" + meanCapacity + "], and includes " + numExceedingMean); _thresholdCapacityValue = meanCapacity; } else if (meanCapacity > thresholdAtMedian && reordered.size()/2 > minHighCapacityPeers) { // avg > median, get the min High Cap peers if (_log.shouldLog(Log.INFO)) _log.info("Our average capacity [" + meanCapacity + "] is greater than the median," + " so threshold is that reqd to get the min high cap peers " + thresholdAtMinHighCap); _thresholdCapacityValue = thresholdAtMinHighCap; } else if (reordered.size()/2 >= minHighCapacityPeers) { // ok mean is skewed low, but we still have enough to use the median // We really don't want to be here, since the default is 5.0 and the median // is inevitably 5.01 or so. 
if (_log.shouldLog(Log.INFO)) _log.info("Our average capacity [" + meanCapacity + "] is skewed under the median," + " so use the median threshold " + thresholdAtMedian); _thresholdCapacityValue = thresholdAtMedian; } else { // our average is doing well, but not enough peers if (_log.shouldLog(Log.INFO)) _log.info("Our average capacity is doing well [" + meanCapacity + "], but there aren't enough of them " + numExceedingMean); _thresholdCapacityValue = Math.max(thresholdAtMinHighCap, thresholdAtLowest); } // the base growth factor is the value we give to new routers that we don't // know anything about. dont go under that limit unless you want to expose // the selection to simple ident flooding attacks if (_thresholdCapacityValue <= CapacityCalculator.GROWTH_FACTOR) _thresholdCapacityValue = CapacityCalculator.GROWTH_FACTOR + 0.0001; } /** * Update the _thresholdSpeedValue by calculating the median speed of all * high capacity peers. * * @param reordered ordered set of PeerProfile objects, ordered by capacity * (highest first) for active nonfailing peers */ private void locked_calculateSpeedThreshold(Set reordered) { if (true) { locked_calculateSpeedThresholdMean(reordered); return; } } private void locked_calculateSpeedThresholdMean(Set reordered) { double total = 0; int count = 0; for (Iterator<PeerProfile> iter = reordered.iterator(); iter.hasNext(); ) { PeerProfile profile = iter.next(); if (profile.getCapacityValue() >= _thresholdCapacityValue) { // duplicates being clobbered is fine by us total += profile.getSpeedValue(); count++; } else { // its ordered break; } } if (count > 0) _thresholdSpeedValue = total / count; if (_log.shouldLog(Log.INFO)) _log.info("Threshold value for speed: " + _thresholdSpeedValue + " out of speeds: " + count); } /** simple average, or 0 if NaN */ private final static double avg(double total, double quantity) { if ( (total > 0) && (quantity > 0) ) return total/quantity; else return 0.0d; } /** called after locking the reorganizeLock 
*/ private PeerProfile locked_getProfile(Hash peer) { PeerProfile cur = (PeerProfile)_notFailingPeers.get(peer); if (cur != null) return cur; cur = (PeerProfile)_failingPeers.get(peer); return cur; } /** * Select peers from the peer mapping, excluding appropriately and increasing the * matches set until it has howMany elements in it. * */ private void locked_selectPeers(Map peers, int howMany, Set toExclude, Set matches) { locked_selectPeers(peers, howMany, toExclude, matches, 0); } private void locked_selectPeers(Map peers, int howMany, Set toExclude, Set matches, int mask) { List all = new ArrayList(peers.keySet()); if (toExclude != null) all.removeAll(toExclude); all.removeAll(matches); all.remove(_us); Collections.shuffle(all, _random); Set IPSet = new HashSet(8); for (int i = 0; (matches.size() < howMany) && (i < all.size()); i++) { Hash peer = (Hash)all.get(i); boolean ok = isSelectable(peer); if (ok) { ok = mask <= 0 || notRestricted(peer, IPSet, mask); if ((!ok) && _log.shouldLog(Log.WARN)) _log.warn("IP restriction prevents " + peer + " from joining " + matches); } if (ok) matches.add(peer); else matches.remove(peer); } } private boolean notRestricted(Hash peer, Set IPSet, int mask) { Set peerIPs = maskedIPSet(peer, mask); if (containsAny(IPSet, peerIPs)) return false; IPSet.addAll(peerIPs); return true; } /** * The Set of IPs for this peer, with a given mask. * Includes the comm system's record of the IP, and all netDb addresses. 
 *
 * @return an opaque set of masked IPs for this peer
 */
private Set maskedIPSet(Hash peer, int mask) {
    Set rv = new HashSet(2);
    // Transport layer's notion of the peer's IP, if any.
    byte[] commIP = _context.commSystem().getIP(peer);
    if (commIP != null)
        rv.add(maskedIP(commIP, mask));
    // Plus every published address host we can resolve from the local netDb.
    RouterInfo pinfo = _context.netDb().lookupRouterInfoLocally(peer);
    if (pinfo == null)
        return rv;
    Set<RouterAddress> paddr = pinfo.getAddresses();
    if (paddr == null)
        return rv;
    for (RouterAddress pa : paddr) {
        Properties pprops = pa.getOptions();
        if (pprops == null) continue;
        String phost = pprops.getProperty("host");
        if (phost == null) continue;
        InetAddress pi;
        try {
            pi = InetAddress.getByName(phost);
        } catch (UnknownHostException uhe) {
            continue;
        }
        // NOTE(review): InetAddress.getByName() never returns null per its
        // contract, so this check is purely defensive.
        if (pi == null) continue;
        byte[] pib = pi.getAddress();
        rv.add(maskedIP(pib, mask));
    }
    return rv;
}

/** generate an arbitrary unique value for this ip/mask (mask = 1-4) */
private Integer maskedIP(byte[] ip, int mask) {
    // Pack the first 'mask' bytes of the address big-endian into an int.
    int rv = 0;
    for (int i = 0; i < mask; i++)
        rv = (rv << 8) | (ip[i] & 0xff);
    return Integer.valueOf(rv);
}

/** does a contain any of the elements in b?
*/ private boolean containsAny(Set a, Set b) { for (Object o : b) { if (a.contains(o)) return true; } return false; } public boolean isSelectable(Hash peer) { NetworkDatabaseFacade netDb = _context.netDb(); // the CLI shouldn't depend upon the netDb if (netDb == null) return true; if (_context.router() == null) return true; if ( (_context.shitlist() != null) && (_context.shitlist().isShitlisted(peer)) ) { // if (_log.shouldLog(Log.DEBUG)) // _log.debug("Peer " + peer.toBase64() + " is shitlisted, dont select it"); return false; // never select a shitlisted peer } RouterInfo info = _context.netDb().lookupRouterInfoLocally(peer); if (null != info) { if (info.getIdentity().isHidden()) { if (_log.shouldLog(Log.WARN)) _log.warn("Peer " + peer.toBase64() + " is marked as hidden, disallowing its use"); return false; } else { boolean exclude = TunnelPeerSelector.shouldExclude(_context, info); if (exclude) { // if (_log.shouldLog(Log.WARN)) // _log.warn("Peer " + peer.toBase64() + " has capabilities or other stats suggesting we avoid it"); return false; } else { // if (_log.shouldLog(Log.INFO)) // _log.info("Peer " + peer.toBase64() + " is locally known, allowing its use"); return true; } } } else { // if (_log.shouldLog(Log.WARN)) // _log.warn("Peer " + peer.toBase64() + " is NOT locally known, disallowing its use"); return false; } } /** * called after locking the reorganizeLock, place the profile in the appropriate tier. * This is where we implement the (betterThanAverage ? 
 * goToTierX : goToTierY) algorithms
 * */
private void locked_placeProfile(PeerProfile profile) {
    if (profile.getIsFailing()) {
        // Failing: remove from every good tier; keep the profile in the failing
        // map unless shouldDrop() says to forget it entirely.
        if (!shouldDrop(profile))
            _failingPeers.put(profile.getPeer(), profile);
        _fastPeers.remove(profile.getPeer());
        _highCapacityPeers.remove(profile.getPeer());
        _wellIntegratedPeers.remove(profile.getPeer());
        _notFailingPeers.remove(profile.getPeer());
        _notFailingPeersList.remove(profile.getPeer());
    } else {
        _failingPeers.remove(profile.getPeer());
        _fastPeers.remove(profile.getPeer());
        _highCapacityPeers.remove(profile.getPeer());
        _wellIntegratedPeers.remove(profile.getPeer());

        _notFailingPeers.put(profile.getPeer(), profile);
        _notFailingPeersList.add(profile.getPeer());
        // if not selectable for a tunnel (shitlisted for example),
        // don't allow them in the high-cap pool, what would the point of that be?
        if (_thresholdCapacityValue <= profile.getCapacityValue() &&
            isSelectable(profile.getPeer())) {
            _highCapacityPeers.put(profile.getPeer(), profile);
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("High capacity: \t" + profile.getPeer().toBase64());
            // Fast requires high capacity AND meeting the speed threshold AND activity.
            if (_thresholdSpeedValue <= profile.getSpeedValue()) {
                if (!profile.getIsActive()) {
                    if (_log.shouldLog(Log.INFO))
                        _log.info("Skipping fast mark [!active] for " + profile.getPeer().toBase64());
                } else {
                    _fastPeers.put(profile.getPeer(), profile);
                    if (_log.shouldLog(Log.DEBUG))
                        _log.debug("Fast: \t" + profile.getPeer().toBase64());
                }
            }
        } else {
            // not high capacity, but not failing (yet)
        }
        // We aren't using the well-integrated list yet...
        // But by observation, the floodfill peers are often not in the
        // high-capacity group, so let's not require a peer to be high-capactiy
        // to call him well-integrated.
        // This could be used later to see if a floodfill peer is for real.
        if (_thresholdIntegrationValue <= profile.getIntegrationValue()) {
            _wellIntegratedPeers.put(profile.getPeer(), profile);
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("Integrated: \t" + profile.getPeer().toBase64());
        }
    }
}

/**
 * This is where we determine whether a failing peer is so poor and we're so overloaded
 * that we just want to forget they exist.  This algorithm won't need to be implemented until
 * after I2P 1.0, most likely, since we should be able to handle thousands of peers profiled
 * without ejecting any of them, but anyway, this is how we'd do it.  Most likely.
 *
 */
private boolean shouldDrop(PeerProfile profile) { return false; }

/**
 * Defines the minimum number of 'fast' peers that the organizer should select.  If
 * the profile calculators derive a threshold that does not select at least this many peers,
 * the threshold will be overridden to make sure this many peers are in the fast+reliable group.
 * This parameter should help deal with a lack of diversity in the tunnels created when some
 * peers are particularly fast.
 *
 * Increase default by two for every local destination, up to a max.
 *
 * @return minimum number of peers to be placed in the 'fast' group
 */
protected int getMinimumFastPeers() {
    // Default grows by 2 per local client destination, capped at the maximum.
    int def = Math.min(DEFAULT_MAXIMUM_FAST_PEERS,
                       (2 *_context.clientManager().listClients().size()) +
                       DEFAULT_MINIMUM_FAST_PEERS - 2);
    return _context.getProperty(PROP_MINIMUM_FAST_PEERS, def);
}

/** fixme add config  @since 0.7.10 */
protected int getMaximumFastPeers() { return 30; }

/** fixme add config  @since 0.7.11 */
protected int getMaximumHighCapPeers() { return 75; }

/**
 * Defines the minimum number of 'fast' peers that the organizer should select.  If
 * the profile calculators derive a threshold that does not select at least this many peers,
 * the threshold will be overridden to make sure this many peers are in the fast+reliable group.
 * This parameter should help deal with a lack of diversity in the tunnels created when some
 * peers are particularly fast.
 *
 * @return minimum number of peers to be placed in the 'fast' group
 */
// NOTE(review): the javadoc above looks copy-pasted from getMinimumFastPeers();
// this method actually controls the minimum size of the high-capacity group.
protected int getMinimumHighCapacityPeers() {
    return _context.getProperty(PROP_MINIMUM_HIGH_CAPACITY_PEERS, DEFAULT_MINIMUM_HIGH_CAPACITY_PEERS);
}

// Shared formatter; DecimalFormat is not thread-safe, hence the synchronized
// wrapper in num() below.
private final static DecimalFormat _fmt = new DecimalFormat("###,##0.00", new DecimalFormatSymbols(Locale.UK));
private final static String num(double num) { synchronized (_fmt) { return _fmt.format(num); } }

/**
 * Read in all of the profiles specified and print out
 * their calculated values.  Usage: <pre>
 *  ProfileOrganizer [filename]*
 * </pre>
 */
public static void main(String args[]) {
    RouterContext ctx = new RouterContext(null); // new net.i2p.router.Router());
    ProfileOrganizer organizer = new ProfileOrganizer(ctx);
    organizer.setUs(Hash.FAKE_HASH);
    ProfilePersistenceHelper helper = new ProfilePersistenceHelper(ctx);
    // Load each profile file given on the command line.
    for (int i = 0; i < args.length; i++) {
        PeerProfile profile = helper.readProfile(new java.io.File(args[i]));
        if (profile == null) {
            System.err.println("Could not load profile " + args[i]);
            continue;
        }
        organizer.addProfile(profile);
    }
    organizer.reorganize();
    DecimalFormat fmt = new DecimalFormat("0,000.0");
    fmt.setPositivePrefix("+");
    // Print every profiled peer with its tier tag (inactive peers get an 'I' prefix).
    for (Iterator<Hash> iter = organizer.selectAllPeers().iterator(); iter.hasNext(); ) {
        Hash peer = iter.next();
        PeerProfile profile = organizer.getProfile(peer);
        if (!profile.getIsActive()) {
            System.out.println("Peer " + profile.getPeer().toBase64().substring(0,4)
                       + " [" + (organizer.isFast(peer) ? "IF+R" :
                                 organizer.isHighCapacity(peer) ? "IR " :
                                 organizer.isFailing(peer) ? "IX " : "I ") + "]: "
                       + "\t Speed:\t" + fmt.format(profile.getSpeedValue())
                       + " Capacity:\t" + fmt.format(profile.getCapacityValue())
                       + " Integration:\t" + fmt.format(profile.getIntegrationValue())
                       + " Active?\t" + profile.getIsActive()
                       + " Failing?\t" + profile.getIsFailing());
        } else {
            System.out.println("Peer " + profile.getPeer().toBase64().substring(0,4)
                       + " [" + (organizer.isFast(peer) ? "F+R " :
                                 organizer.isHighCapacity(peer) ? "R " :
                                 organizer.isFailing(peer) ? "X " : " ") + "]: "
                       + "\t Speed:\t" + fmt.format(profile.getSpeedValue())
                       + " Capacity:\t" + fmt.format(profile.getCapacityValue())
                       + " Integration:\t" + fmt.format(profile.getIntegrationValue())
                       + " Active?\t" + profile.getIsActive()
                       + " Failing?\t" + profile.getIsFailing());
        }
    }

    System.out.println("Thresholds:");
    System.out.println("Speed: " + num(organizer.getSpeedThreshold()) + " (" + organizer.countFastPeers() + " fast peers)");
    System.out.println("Capacity: " + num(organizer.getCapacityThreshold()) + " (" + organizer.countHighCapacityPeers() + " reliable peers)");
}
}
package com.bkromhout.ruqus;

import android.app.Activity;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.util.AttributeSet;
import android.view.View;
import android.widget.*;
import com.afollestad.materialdialogs.DialogAction;
import com.afollestad.materialdialogs.MaterialDialog;
import com.wdullaer.materialdatetimepicker.date.DatePickerDialog;
import io.realm.Sort;
import java.util.ArrayList;
import java.util.Calendar;

/**
 * RealmQueryView
 * @author bkromhout
 */
public class RealmQueryView extends FrameLayout implements DatePickerDialog.OnDateSetListener {
    // UI modes: MAIN = query overview, C_BUILD = condition builder, S_BUILD = sort builder.
    private enum Mode {
        MAIN, C_BUILD, S_BUILD
    }

    /* Views for main mode. */
    private RelativeLayout mainCont;
    private RQVCard queryableChooser;
    private ScrollView scrollView;
    private LinearLayout partsCont;
    private RQVCard sortChooser;
    /* Views for either builder mode. */
    private RelativeLayout builderCont;
    private TextView builderHeader;
    private LinearLayout builderParts;
    private Button cancelButton;
    private Button saveButton;
    /* Views for condition builder mode. */
    private Spinner fieldChooser;
    private Spinner conditionalChooser;
    /* Views for sort builder mode. */
    private Button addSortField;

    /**
     * Current theme type.
     */
    private RuqusTheme theme = null;
    /**
     * Current user query.
     */
    private RealmUserQuery ruq;
    /**
     * Current mode.
     */
    private Mode mode;
    /**
     * Simple name of the current {@link Queryable} class.
     */
    private String currClassName;
    /**
     * List of current visible flat field names; changes when {@link #currClassName} changes.
     */
    private ArrayList<String> currVisibleFlatFieldNames;

    /* Variables for the condition builder. */
    /**
     * Index of the query part currently being worked on.
     */
    private int currPartIdx;
    /**
     * Real name of the field currently selected in the condition builder.
     */
    private String currFieldName;
    /**
     * {@link FieldType} of the field currently selected in the condition builder; changes when {@link #currFieldName}
     * changes.
     */
    private FieldType currFieldType;
    /**
     * Real name of the transformer/conditional currently selected in the condition builder.
     */
    private String currTransName;
    /**
     * Holds IDs of views added to the {@link #builderParts} view group which we check to get arguments.
     */
    private ArrayList<Integer> argViewIds;

    /* Variables for the sort builder. */
    /**
     * Holds IDs of sort field spinners.
     */
    private ArrayList<Integer> sortSpinnerIds;
    /**
     * Holds IDs of buttons which remove sort fields.
     */
    private ArrayList<Integer> removeSortBtnIds;
    /**
     * Holds IDs of radio groups which set sort directions.
     */
    private ArrayList<Integer> sortDirRgIds;

    /* Constructors. */
    // All constructors funnel into the four-argument form below.

    public RealmQueryView(Context context) {
        this(context, null, null, null);
    }

    public RealmQueryView(Context context, RealmUserQuery ruq) {
        this(context, null, ruq, null);
    }

    public RealmQueryView(Context context, RuqusTheme theme) {
        this(context, null, null, theme);
    }

    public RealmQueryView(Context context, RealmUserQuery ruq, RuqusTheme theme) {
        this(context, null, ruq, theme);
    }

    public RealmQueryView(Context context, AttributeSet attrs) {
        this(context, attrs, null, null);
    }

    public RealmQueryView(Context context, AttributeSet attrs, RealmUserQuery ruq, RuqusTheme theme) {
        super(context, attrs);
        this.ruq = ruq;
        this.theme = theme;
        init(context, attrs);
    }

    /**
     * Sets the theme of the view and any child views.
     * @param theme Theme to switch to.
     */
    public void setTheme(RuqusTheme theme) {
        this.theme = theme;

        // Set theme on queryable and sort choosers.
        queryableChooser.setTheme(theme);
        sortChooser.setTheme(theme);

        // Set theme on all condition cards.
        for (int i = 0; i < partsCont.getChildCount(); i++)
            ((RQVCard2) partsCont.getChildAt(i)).setTheme(theme);

        // TODO Set for builder modes.
    }

    /**
     * Check whether the {@link RealmUserQuery} that this {@link RealmQueryView} currently has is fully-formed.
     * @return True if query is fully-formed, otherwise false.
     */
    public boolean isQueryValid() {
        return ruq.isQueryValid();
    }

    /**
     * Get the {@link RealmUserQuery} which this {@link RealmQueryView} currently has. Note that {@link RealmUserQuery}
     * implements {@link Parcelable}, which allows it to be passed around quickly and easily.
     * <p/>
     * This method does not guarantee that the returned query will be fully-formed and valid. Call {@link
     * RealmUserQuery#isQueryValid()} to check for validity before calling {@link RealmUserQuery#execute()}.
     * @return Realm user query object.
     */
    public RealmUserQuery getRealmUserQuery() {
        return this.ruq;
    }

    /**
     * Set up this {@link RealmQueryView} using the given {@link RealmUserQuery}.
     * @param ruq Realm user query to use to set up this {@link RealmQueryView}. Must be fully-formed.
     */
    public void setRealmUserQuery(RealmUserQuery ruq) {
        this.ruq = ruq;
        setupUsingRUQ();
    }

    /**
     * Initialize our view.
     * @param context Context to use.
     * @param attrs   Attributes.
     */
    private void init(Context context, AttributeSet attrs) {
        inflate(context, R.layout.realm_query_view, this);

        // Find main mode views.
        mainCont = (RelativeLayout) findViewById(R.id.main);
        queryableChooser = (RQVCard) findViewById(R.id.queryable_type);
        scrollView = (ScrollView) findViewById(R.id.main_scroll_view);
        partsCont = (LinearLayout) findViewById(R.id.query_parts);
        sortChooser = (RQVCard) findViewById(R.id.sort_type);

        // Find common builder mode views.
        builderCont = (RelativeLayout) findViewById(R.id.builder);
        builderHeader = (TextView) findViewById(R.id.builder_header);
        builderParts = (LinearLayout) findViewById(R.id.builder_parts);
        cancelButton = (Button) findViewById(R.id.cancel);
        saveButton = (Button) findViewById(R.id.save);

        // Find condition builder views.
        fieldChooser = (Spinner) findViewById(R.id.field_chooser);
        conditionalChooser = (Spinner) findViewById(R.id.conditional_chooser);

        // Find sort builder views.
        addSortField = (Button) findViewById(R.id.add_sort_field);

        // Read attributes.
        initAttrs(context, attrs);

        // Initialize UI.
        mode = Mode.MAIN;
        initUi();

        // Create a new RealmUserQuery if we weren't given one.
        if (ruq == null) ruq = new RealmUserQuery();
    }

    /**
     * Initializes the view using the given attributes.
     * @param context Context to use.
     * @param attrs   Attributes.
     */
    private void initAttrs(Context context, AttributeSet attrs) {
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.RealmQueryView);

        if (theme == null) {
            // Get theme, default to light.
            // NOTE(review): ruqus_theme is read via getResourceId() and compared to 0;
            // confirm the attribute is declared as a reference rather than an enum/int.
            theme = typedArray.getResourceId(R.styleable.RealmQueryView_ruqus_theme, 0) == 0
                    ? RuqusTheme.LIGHT : RuqusTheme.DARK;
        }

        typedArray.recycle();
    }

    /**
     * Initialize the UI.
     */
    private void initUi() {
        setTheme(theme);

        // Set up main mode views.
        // Set click handlers for queryable and sort choosers.
        queryableChooser.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onQueryableChooserClicked();
            }
        });
        sortChooser.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onSortChooserClicked();
            }
        });

        // Set up common builder views.
        cancelButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                switchMode(Mode.MAIN);
            }
        });
        saveButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onSaveClicked();
            }
        });

        // Set up condition builder views.
        fieldChooser.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // Make sure user didn't select nothing.
String selStr = (String) parent.getItemAtPosition(position); if (Ruqus.CHOOSE_FIELD.equals(selStr)) { currFieldName = null; onSelCondFieldChanged(); return; } // It's a real field. String realFieldName = Ruqus.fieldFromVisibleField(currClassName, selStr); if (currFieldName == null || currFieldName.equals(realFieldName)) { // We only care if if was changed. currFieldName = realFieldName; onSelCondFieldChanged(); } } @Override public void onNothingSelected(AdapterView<?> parent) { currFieldName = null; onSelCondFieldChanged(); } }); conditionalChooser.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { // Make sure user didn't select nothing. String selStr = (String) parent.getItemAtPosition(position); if (Ruqus.CHOOSE_CONDITIONAL.equals(selStr)) { currTransName = null; updateArgViews(); return; } // It's a real transformer. String realTransName = Ruqus.transformerNameFromVisibleName(selStr, false); if (currTransName == null || currTransName.equals(realTransName)) { // We only care if it was changed. currTransName = realTransName; updateArgViews(); } } @Override public void onNothingSelected(AdapterView<?> parent) { currTransName = null; updateArgViews(); } }); // Set up sort builder views. addSortField.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { addSortFieldView(-1, null); } }); // Finish setup. if (ruq == null) { // If we don't have a realm user query already, setup is very minimal, we just disable the scrollview and // sort choosers. setConditionsAndSortEnabled(false); return; } // If we have a RUQ already, we need to draw our view accordingly. setupUsingRUQ(); } @Override protected Parcelable onSaveInstanceState() { // Allow parent classes to save state. Parcelable superState = super.onSaveInstanceState(); SavedState ss = new SavedState(superState); // Save our state. 
ss.theme = this.theme; ss.ruq = this.ruq; ss.mode = this.mode; ss.currClassName = this.currClassName; ss.currVisibleFlatFieldNames = this.currVisibleFlatFieldNames; if (this.mode == Mode.C_BUILD) { // Only save condition builder variables if we're in that mode. ss.currPartIdx = this.currPartIdx; ss.currFieldName = this.currFieldName; ss.currFieldType = this.currFieldType; ss.currTransName = this.currTransName; ss.argViewIds = this.argViewIds; } else if (this.mode == Mode.S_BUILD) { // Only save sort builder variables if we're in that mode. ss.sortSpinnerIds = this.sortSpinnerIds; ss.removeSortBtnIds = this.removeSortBtnIds; ss.sortDirRgIds = this.sortDirRgIds; } return ss; } @Override protected void onRestoreInstanceState(Parcelable state) { //Allow parent classes to restore state. if (!(state instanceof SavedState)) { super.onRestoreInstanceState(state); return; } SavedState ss = (SavedState) state; super.onRestoreInstanceState(ss.getSuperState()); // Restore our state. this.theme = ss.theme; this.ruq = ss.ruq; this.mode = ss.mode; this.currClassName = ss.currClassName; this.currVisibleFlatFieldNames = ss.currVisibleFlatFieldNames; if (this.mode == Mode.C_BUILD) { // Only try to restore condition builder variables if we were in that mode. this.currPartIdx = ss.currPartIdx; this.currFieldName = ss.currFieldName; this.currFieldType = ss.currFieldType; this.currTransName = ss.currTransName; this.argViewIds = ss.argViewIds; } else if (this.mode == Mode.S_BUILD) { // Only try to restore sort builder variables if we were in that mode. this.sortSpinnerIds = ss.sortSpinnerIds; this.removeSortBtnIds = ss.removeSortBtnIds; this.sortDirRgIds = ss.sortDirRgIds; } } /** * Switches the view between main and various builder modes. * @param mode Mode to switch to. */ private void switchMode(Mode mode) { // Make sure we clean up if coming from a builder mode, or hide the main container if going to one. 
if (this.mode == Mode.C_BUILD && mode == Mode.MAIN) tearDownConditionBuilderMode(); else if (this.mode == Mode.S_BUILD && mode == Mode.MAIN) tearDownSortBuilderMode(); else if (this.mode == Mode.MAIN && mode != this.mode) mainCont.setVisibility(GONE); // Switch mode and UI. switch (mode) { case MAIN: mainCont.setVisibility(VISIBLE); break; case C_BUILD: { initConditionBuilderMode(currPartIdx >= partsCont.getChildCount() - 1 ? null : ruq.getConditions().get(currPartIdx)); builderCont.setVisibility(VISIBLE); break; } case S_BUILD: initSortBuilderMode(ruq.getSortFields(), ruq.getSortDirs()); builderCont.setVisibility(VISIBLE); break; } } /** * Set up the view using the current value of {@link #ruq}. */ private void setupUsingRUQ() { if (ruq == null || !ruq.isQueryValid()) return; // Set queryable class. String realName = ruq.getQueryClass().getSimpleName(); setQueryable(realName, Ruqus.getClassData().visibleNameOf(realName)); // Set sort fields (if present). sortChooser.setMode(ruq.getSortFields().size() > 0 ? RQVCard.Mode.CARD : RQVCard.Mode.OUTLINE); sortChooser.setCardText("Sorted by " + ruq.getSortString()); // Add part cards. partsCont.removeAllViews(); // Make sure parts container is empty first! for (Condition condition : ruq.getConditions()) appendPartView(condition); // Append an add part view. appendAddPartView(); } /** * Sets the "enabled" state of the query parts container and the sort chooser. * @param enabled If true, enable views. Otherwise disable them. */ private void setConditionsAndSortEnabled(boolean enabled) { scrollView.setEnabled(enabled); sortChooser.setEnabled(enabled); } /** * Creates an {@link RQVCard2} and sets it to card mode, filling it in using the given {@code condition}. * @param condition Condition to use to fill the card's text. */ private void appendPartView(Condition condition) { if (condition == null) throw new IllegalArgumentException("Must provide a Condition."); // Get visible condition string. 
String visCondString = condition.toString(); // Create a new card. RQVCard2 cond = new RQVCard2(getContext(), theme); cond.setMode(RQVCard2.Mode.CARD); // Set index tag to the current child count of the conditions container, since that will be this item's index // once it is added to the end of it. Also set content tag to the same as the current content. cond.setTag(R.id.index, partsCont.getChildCount()); cond.setTag(R.id.curr_val, visCondString); // Set the card's listener and long click listener. cond.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { onConditionClicked((Integer) v.getTag(R.id.index)); } }); cond.setOnLongClickListener(new OnLongClickListener() { @Override public boolean onLongClick(View v) { onPartLongClicked((Integer) v.getTag(R.id.index)); return true; } }); // Set a unique view ID. cond.setId(Util.getUniqueViewId()); // Add to the parts container. partsCont.addView(cond); } /** * Creates an {@link RQVCard2} and sets it to outlines mode with the texts "Add Operator" and "Add Condition", then * adds it to the end of {@link #partsCont}. */ private void appendAddPartView() { // Only add the view if we have the same number of views and conditions currently (indicates each view is // tied to a condition. if (ruq != null && partsCont.getChildCount() == ruq.conditionCount()) { RQVCard2 add = new RQVCard2(getContext(), theme); add.setMode(RQVCard2.Mode.OUTLINES); add.setOutlineText(R.string.add_operator_nl, R.string.add_condition_nl); // Set tag to the current child count of the conditions container, since that will be this item's index // once it is added to the end of it. add.setTag(R.id.index, partsCont.getChildCount()); // Set the outline text views' OnClickListeners. 
add.setOutline1ClickListener(new OnClickListener() { @Override public void onClick(View v) { onOperatorClicked((Integer) v.getTag(R.id.index), null); } }); add.setOutline2ClickListener(new OnClickListener() { @Override public void onClick(View v) { onConditionClicked((Integer) v.getTag(R.id.index)); } }); // Set a unique view ID. add.setId(Util.getUniqueViewId()); // Add to the parts container. partsCont.addView(add); } } /** * Clears the view back to its initial state and sets {@link #ruq} to a new instance of {@link RealmUserQuery}. */ private void reset() { // Switch to main mode (will tear down builder modes if necessary). switchMode(Mode.MAIN); // New RUQ. ruq = new RealmUserQuery(); currClassName = null; currVisibleFlatFieldNames = null; currPartIdx = -1; // Reset choosers back to outline mode. queryableChooser.setMode(RQVCard.Mode.OUTLINE); sortChooser.setMode(RQVCard.Mode.OUTLINE); // Clear all children from condition container. partsCont.removeAllViews(); // Disable conditions container and sort chooser. setConditionsAndSortEnabled(false); } /** * Show a dialog with the visible names of all classes annotated with {@link Queryable}. */ private void onQueryableChooserClicked() { new MaterialDialog.Builder(getContext()) .title(R.string.choose_queryable_title) .items(Ruqus.getClassData().getVisibleNames(true)) .itemsCallback(new MaterialDialog.ListCallback() { @Override public void onSelection(MaterialDialog dialog, View itemView, int which, CharSequence text) { String realName = Ruqus.classNameFromVisibleName(text.toString()); reset(); setQueryable(realName, text.toString()); ruq.setQueryClass(realName); } }) .show(); } /** * Called when the queryable class has been set. Only affects the view, not {@link #ruq}. * @param visibleName Visible name of the queryable class. */ private void setQueryable(String realName, String visibleName) { // Set instance vars. 
currClassName = realName; currVisibleFlatFieldNames = Ruqus.visibleFlatFieldsForClass(currClassName); currVisibleFlatFieldNames.add(0, Ruqus.CHOOSE_FIELD); // Set condition builder field chooser's adapter. fieldChooser.setAdapter(makeFieldAdapter()); // Set queryable chooser's card text and mode. queryableChooser.setCardText(visibleName); queryableChooser.setMode(RQVCard.Mode.CARD); // Append an add view to the conditions container, then enable the conditions container and sort chooser. appendAddPartView(); setConditionsAndSortEnabled(true); } private ArrayAdapter<String> makeFieldAdapter() { return new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_dropdown_item, currVisibleFlatFieldNames); } /** * Called when an {@link RQVCard2}'s outline text view which reads "Add Operator", or a card which has been filled * in with a real operator, is clicked. Shows a dialog of visible names of all no-args transformers (AKA, * "Operators"). * @param index Index of the card in the conditions container. * @param currVal The text which is currently on the card, or null if the card is in outline mode. */ private void onOperatorClicked(final int index, final String currVal) { new MaterialDialog.Builder(getContext()) .title(index == partsCont.getChildCount() - 1 ? R.string.add_operator : R.string.change_operator) .items(Ruqus.getTransformerData().getVisibleNoArgNames()) .itemsCallback(new MaterialDialog.ListCallback() { @Override public void onSelection(MaterialDialog dialog, View itemView, int which, CharSequence text) { if (currVal == null || !currVal.equals(text.toString())) setOperator(index, text.toString()); } }) .show(); } /** * Called when a card has been set as an operator card. * @param index Index of the card in the conditions container. * @param visibleName String to put on the card. 
 */
private void setOperator(int index, String visibleName) {
    // Resolve the visible ("pretty") name back to the transformer's real name.
    String realName = Ruqus.transformerNameFromVisibleName(visibleName, true);
    RQVCard2 card = (RQVCard2) partsCont.getChildAt(index);
    card.setTag(R.id.curr_val, visibleName);
    if (index == partsCont.getChildCount() - 1) {
        // This was an outline-mode card before this, and ruq doesn't have a condition for it.
        // Set the card's card listener and long click listener.
        card.setCardClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onOperatorClicked((Integer) v.getTag(R.id.index), (String) v.getTag(R.id.curr_val));
            }
        });
        card.setOnLongClickListener(new OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                onPartLongClicked((Integer) v.getTag(R.id.index));
                return true;
            }
        });
        // Set the card's text.
        card.setCardText(visibleName);
        // Set the card's mode to CARD.
        card.setMode(RQVCard2.Mode.CARD);
        // Create a new condition; we just need to set the transformer's real name and the realm class's name
        // since it's an no-args condition.
        Condition condition = new Condition();
        condition.setTransformer(realName);
        condition.setRealmClass(currClassName);
        // Add the condition to the query.
        ruq.getConditions().add(condition);
        // Finally, append another add view to the conditions container.
        appendAddPartView();
    } else {
        // This was a card-mode card already, ruq already had a condition for it.
        // Update card text.
        card.setCardText(visibleName);
        // Update condition.
        Condition condition = ruq.getConditions().get(index);
        condition.setTransformer(realName);
        condition.setRealmClass(currClassName);
    }
}

/**
 * Called when an {@link RQVCard2}'s outline text view which reads "Add Condition", or when a card which has been
 * filled in with a real condition, is clicked. Switches to condition builder mode.
 * @param index Index of the card in the conditions container.
 */
private void onConditionClicked(final int index) {
    // Remember which card is being edited; the condition builder reads currPartIdx.
    this.currPartIdx = index;
    switchMode(Mode.C_BUILD);
}

/**
 * Called when the sort mode chooser is clicked. Switches to sort builder mode.
 */
private void onSortChooserClicked() {
    switchMode(Mode.S_BUILD);
}

/**
 * Show a dialog asking if we want to delete the long-clicked card.
 * @param index Index of the card in the conditions container.
 */
private void onPartLongClicked(final int index) {
    // NOTE(review): the dialog title is R.string.remove_operator even when the long-clicked card is a
    // condition, and removing a view does not refresh the R.id.index tags of the cards after it — verify.
    new MaterialDialog.Builder(getContext())
            .title(R.string.remove_operator)
            .negativeText(R.string.no)
            .positiveText(R.string.yes)
            .onPositive(new MaterialDialog.SingleButtonCallback() {
                @Override
                public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                    // Remove from RUQ.
                    ruq.getConditions().remove(index);
                    // Remove from conditions container.
                    partsCont.removeViewAt(index);
                }
            })
            .show();
}

/**
 * Called when the builder's save button is clicked (in either builder mode). Validates the builder's inputs,
 * commits them to {@link #ruq}, and switches back to main mode.
 */
private void onSaveClicked() {
    switch (mode) {
        case C_BUILD: {
            // Validate field.
            if (currFieldName == null || currFieldType == null) {
                Toast.makeText(getContext(), R.string.error_must_set_field, Toast.LENGTH_LONG).show();
                return;
            }
            // Validate conditional.
            if (currTransName == null) {
                Toast.makeText(getContext(), R.string.error_must_set_conditional, Toast.LENGTH_LONG).show();
                return;
            }
            // Validate and get args.
            Object[] args = getArgsIfValid();
            if (args == null) return;
            // Get card.
            RQVCard2 card = (RQVCard2) partsCont.getChildAt(currPartIdx);
            // Create or get condition. The last card is the "add" card and has no backing Condition yet.
            Condition condition = currPartIdx == partsCont.getChildCount() - 1 ? new Condition()
                    : ruq.getConditions().get(currPartIdx);
            // Fill in/update the condition.
            if (condition.getRealmClass() == null) condition.setRealmClass(currClassName);
            condition.setField(currFieldName);
            condition.setTransformer(currTransName);
            condition.setArgs(args);
            // Get the visible condition string.
            String visCondString = condition.toString();
            // Set the card's text (and its tag).
            card.setTag(R.id.curr_val, visCondString);
            card.setCardText(visCondString);
            // If the card is still in OUTLINES mode, we know this is a new Condition, and that we need to do a bit
            // more setup for the card prior to adding the Condition to the query and switching back to MAIN mode.
            if (card.getMode() == RQVCard2.Mode.OUTLINES) {
                // New condition, we need to set the card up a bit more too.
                // Set the card's listener and long click listener.
                card.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        onConditionClicked((Integer) v.getTag(R.id.index));
                    }
                });
                card.setOnLongClickListener(new OnLongClickListener() {
                    @Override
                    public boolean onLongClick(View v) {
                        onPartLongClicked((Integer) v.getTag(R.id.index));
                        return true;
                    }
                });
                // Set the card's mode to CARD.
                card.setMode(RQVCard2.Mode.CARD);
                // Add the condition to the query.
                ruq.getConditions().add(condition);
                // Finally, append another add view to the conditions container.
                appendAddPartView();
            }
            break;
        }
        case S_BUILD: {
            ArrayList<String> sortFields = new ArrayList<>();
            ArrayList<Sort> sortDirs = new ArrayList<>();
            // Get sort fields.
            for (Integer sortSpinnerId : sortSpinnerIds)
                sortFields.add(Ruqus.fieldFromVisibleField(currClassName,
                        (String) ((Spinner) builderParts.findViewById(sortSpinnerId)).getSelectedItem()));
            // Get sort dirs.
            for (Integer sortDirRgId : sortDirRgIds)
                sortDirs.add(((RadioGroup) builderParts.findViewById(sortDirRgId))
                        .getCheckedRadioButtonId() == R.id.asc ? Sort.ASCENDING : Sort.DESCENDING);
            // Set ruq sort fields.
            ruq.setSorts(sortFields, sortDirs);
            // Set sort chooser mode and/or card text.
            sortChooser.setMode(sortFields.size() > 0 ? RQVCard.Mode.CARD : RQVCard.Mode.OUTLINE);
            sortChooser.setCardText("Sorted by " + ruq.getSortString());
            break;
        }
    }
    // Switch back to main container.
    switchMode(Mode.MAIN);
}

/* Methods for Condition builder mode. */

/**
 * Called to set up the builder views for condition builder mode.
* @param condition Condition to use to pre-fill views. */ private void initConditionBuilderMode(Condition condition) { // Make sure currPartIdx is set. if (currPartIdx == -1) throw new IllegalArgumentException("Must set currPartIdx for C_BUILD mode."); // Set up views. builderHeader.setText(R.string.edit_condition_title); fieldChooser.setVisibility(VISIBLE); // Set up vars. argViewIds = new ArrayList<>(); // Set up from condition. if (condition != null) { // Select correct value in field chooser. TODO need to manually set up next part? currFieldName = condition.getField(); fieldChooser.setSelection(currVisibleFlatFieldNames.indexOf( Ruqus.visibleFieldFromField(currClassName, currFieldName))); // Select correct transformer. TODO ditto? currTransName = condition.getTransformer(); conditionalChooser.setSelection(Ruqus.getTransformerData().getVisibleNames().indexOf( Ruqus.getTransformerData().visibleNameOf(currTransName))); // Fill in argument views. fillArgViews(condition.getArgs()); } } /** * Called to clean up the builder views when finishing condition builder mode. */ private void tearDownConditionBuilderMode() { // Clean up views. fieldChooser.setVisibility(GONE); conditionalChooser.setVisibility(GONE); builderParts.removeAllViews(); // Clean up vars. currPartIdx = -1; currFieldName = null; currFieldType = null; currTransName = null; argViewIds = null; } /** * Called when the selection in {@link #fieldChooser} changes. Sets up other views based on value of {@link * #currFieldName}. */ private void onSelCondFieldChanged() { // If currFieldName is null, we should tear some things down. if (currFieldName == null) { currFieldType = null; updateArgViews(); conditionalChooser.setVisibility(GONE); return; } // Get field type from field. currFieldType = Ruqus.typeEnumForField(currClassName, currFieldName); // Get the list of visible names for all transformers which accept the given field type. 
ArrayList<String> conditionals = Ruqus.getTransformerData().getVisibleNames(currFieldType.getClazz()); conditionals.add(0, Ruqus.CHOOSE_CONDITIONAL); // Create an array adapter from it. ArrayAdapter<String> conditionalAdapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_dropdown_item, conditionals); // Bind the adapter to the spinner. conditionalChooser.setAdapter(conditionalAdapter); // Make sure conditional chooser is visible. conditionalChooser.setVisibility(VISIBLE); } /** * Update the views in {@link #builderParts} so that they allow the user to input the correct type of data based on * the current {@link #currFieldName}, {@link #currFieldType}, and {@link #currTransName}. */ private void updateArgViews() { builderParts.removeAllViews(); if (currFieldName == null || currFieldType == null || currTransName == null) { currTransName = null; return; } // Add views based on the field type and the number of arguments that the transformer accepts. int numArgs = Ruqus.numberOfArgsFor(currTransName); for (int i = 0; i < numArgs; i++) { final int id = Util.getUniqueViewId(); switch (currFieldType) { case BOOLEAN: RadioGroup rgFalseTrue = (RadioGroup) View.inflate(getContext(), R.layout.rg_false_true, null); rgFalseTrue.setId(id); builderParts.addView(rgFalseTrue); break; case DATE: LinearLayout tvDateCont = (LinearLayout) View.inflate(getContext(), R.layout.tv_date, null); // Set up date button to open date picker dialog. 
tvDateCont.findViewById(R.id.choose_date).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { TextView tvDate = (TextView) builderParts.findViewById(id).findViewById(R.id.tv_date); makeDatePickerDialog(Util.calFromString(tvDate.getText().toString()), id) .show(((Activity) getContext()).getFragmentManager(), "RuqusDPD"); } }); tvDateCont.setId(id); builderParts.addView(tvDateCont); break; case DOUBLE: case FLOAT: EditText etDecimal = (EditText) View.inflate(getContext(), R.layout.et_decimal, null); etDecimal.setId(id); builderParts.addView(etDecimal); break; case INTEGER: case LONG: case SHORT: EditText etWholeNumber = (EditText) View.inflate(getContext(), R.layout.et_whole_number, null); etWholeNumber.setId(id); builderParts.addView(etWholeNumber); break; case STRING: EditText etString = (EditText) View.inflate(getContext(), R.layout.et_string, null); etString.setId(id); builderParts.addView(etString); break; } argViewIds.add(id); } } /** * Make the date picker dialog to show for date inputs. * @param c Calendar instance to use to set initially selected date. * @param id ID of view to modify upon callback. * @return The newly-created date picker dialog. */ private DatePickerDialog makeDatePickerDialog(Calendar c, int id) { DatePickerDialog dpd = DatePickerDialog.newInstance(RealmQueryView.this, c.get(Calendar.YEAR), c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH)); dpd.setThemeDark(theme == RuqusTheme.DARK); Bundle b = new Bundle(); b.putInt("ID", id); dpd.setArguments(b); dpd.autoDismiss(true); return dpd; } /** * Fill in argument views in condition builder using the values passed in {@code args}. * @param args Values retrieved using {@link Condition#getArgs()}. */ private void fillArgViews(Object[] args) { for (int i = 0; i < argViewIds.size(); i++) { View view = builderCont.findViewById(argViewIds.get(i)); switch (currFieldType) { case BOOLEAN: ((RadioGroup) view).check((Boolean) args[i] ? 
R.id.rb_true : R.id.rb_false); break; case DATE: ((TextView) view.findViewById(R.id.tv_date)).setText(Util.dateFormat.format(args[i])); break; case DOUBLE: case FLOAT: case INTEGER: case LONG: case SHORT: ((TextView) view).setText(String.valueOf(args[i])); break; case STRING: ((TextView) view).setText((String) args[i]); break; } } } /** * Called when a date was picked in the date picker dialog. */ @Override public void onDateSet(DatePickerDialog view, int year, int monthOfYear, int dayOfMonth) { int index = view.getArguments().getInt("ID", -1); if (index == -1) throw new IllegalArgumentException("Bad index!"); ((TextView) builderCont.findViewById(argViewIds.get(index)).findViewById(R.id.tv_date)).setText( Util.stringFromDateInts(year, monthOfYear, dayOfMonth)); } /** * Attempts to validates the values that the user has provided to the condition builder, and returns them if they * pass. * @return Array of input values as Objects, or null if any input values are invalid. */ private Object[] getArgsIfValid() { Object[] args = new Object[argViewIds.size()]; for (int i = 0; i < argViewIds.size(); i++) { View argView = builderParts.findViewById(argViewIds.get(i)); switch (currFieldType) { case BOOLEAN: // There's no way that neither of the radio buttons are checked :) RadioGroup rgFalseTrue = (RadioGroup) argView; args[i] = rgFalseTrue.getCheckedRadioButtonId() != R.id.rb_false; continue; case DATE: TextView tvDate = (TextView) argView.findViewById(R.id.tv_date); if (tvDate.length() == 0) { tvDate.setError(getContext().getString(R.string.error_empty_input)); return null; } args[i] = Util.calFromString(tvDate.getText().toString()).getTime(); continue; case DOUBLE: EditText etDouble = (EditText) argView; if (etDouble.length() == 0) { etDouble.setError(getContext().getString(R.string.error_empty_input)); return null; } args[i] = FieldType.parseNumberIfPossible(currFieldType, etDouble.getText().toString()); if (args[i] == null) { 
etDouble.setError(getContext().getString(R.string.error_out_of_range_double)); return null; } continue; case FLOAT: EditText etFloat = (EditText) argView; if (etFloat.length() == 0) { etFloat.setError(getContext().getString(R.string.error_empty_input)); return null; } args[i] = FieldType.parseNumberIfPossible(currFieldType, etFloat.getText().toString()); if (args[i] == null) { etFloat.setError(getContext().getString(R.string.error_out_of_range_float)); return null; } continue; case INTEGER: EditText etInteger = (EditText) argView; if (etInteger.length() == 0) { etInteger.setError(getContext().getString(R.string.error_empty_input)); return null; } args[i] = FieldType.parseNumberIfPossible(currFieldType, etInteger.getText().toString()); if (args[i] == null) { etInteger.setError(getContext().getString(R.string.error_out_of_range_integer)); return null; } continue; case LONG: EditText etLong = (EditText) argView; if (etLong.length() == 0) { etLong.setError(getContext().getString(R.string.error_empty_input)); return null; } args[i] = FieldType.parseNumberIfPossible(currFieldType, etLong.getText().toString()); if (args[i] == null) { etLong.setError(getContext().getString(R.string.error_out_of_range_long)); return null; } continue; case SHORT: EditText etShort = (EditText) argView; if (etShort.length() == 0) { etShort.setError(getContext().getString(R.string.error_empty_input)); return null; } args[i] = FieldType.parseNumberIfPossible(currFieldType, etShort.getText().toString()); if (args[i] == null) { etShort.setError(getContext().getString(R.string.error_out_of_range_short)); return null; } continue; case STRING: EditText etString = (EditText) argView; args[i] = etString.getText().toString(); if (((String) args[i]).isEmpty()) { etString.setError(getContext().getString(R.string.error_empty_input)); return null; } } } return args; } /* Methods for sort builder mode. */ /** * Called to set up the builder views for sort builder mode. * @param sortFields Current sort fields. 
 * @param sortDirs Current sort directions.
 */
private void initSortBuilderMode(ArrayList<String> sortFields, ArrayList<Sort> sortDirs) {
    // Set up views.
    builderHeader.setText(R.string.choose_sort_fields_title);
    addSortField.setVisibility(VISIBLE);
    // Set up vars.
    sortSpinnerIds = new ArrayList<>();
    removeSortBtnIds = new ArrayList<>();
    sortDirRgIds = new ArrayList<>();
    // If present, add current sort fields.
    for (int i = 0; i < sortFields.size(); i++)
        addSortFieldView(currVisibleFlatFieldNames.indexOf(Ruqus.visibleFieldFromField(currClassName,
                sortFields.get(i))), sortDirs.get(i));
}

/**
 * Called to clean up the builder views when finishing sort builder mode.
 */
private void tearDownSortBuilderMode() {
    // Clean up views.
    addSortField.setVisibility(GONE);
    builderParts.removeAllViews();
    // Clean up vars.
    sortSpinnerIds = null;
    removeSortBtnIds = null;
    sortDirRgIds = null;
}

/**
 * Called to add a sort field view to {@link #builderParts} (and optionally pre-fill it).
 * @param selectedFieldPos Position in spinner to pre-select, or -1.
 * @param sortDir Sort direction to pre-select, or null.
 */
private void addSortFieldView(int selectedFieldPos, Sort sortDir) {
    // idx is this row's position; it is captured by the listeners below.
    final int idx = sortSpinnerIds.size();
    RelativeLayout sortPart = (RelativeLayout) View.inflate(getContext(), R.layout.sort_part, null);
    // Set label text.
    ((TextView) sortPart.findViewById(R.id.sort_field_label)).setText(
            getContext().getString(R.string.sort_field_label, idx));
    // Set up spinner.
    Spinner fieldSpinner = (Spinner) sortPart.findViewById(R.id.sort_field);
    fieldSpinner.setAdapter(makeFieldAdapter());
    fieldSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
        @Override
        public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
            // Make sure the user didn't select nothing.
            String selStr = (String) parent.getItemAtPosition(position);
            if (Ruqus.CHOOSE_FIELD.equals(selStr))
                builderParts.findViewById(sortDirRgIds.get(idx)).setVisibility(GONE);
            else setSortDirOptions(idx, selStr);
        }

        @Override
        public void onNothingSelected(AdapterView<?> parent) {
            builderParts.findViewById(sortDirRgIds.get(idx)).setVisibility(GONE);
        }
    });
    // Set up remove button.
    ImageButton removeButton = (ImageButton) sortPart.findViewById(R.id.remove_field);
    DrawableCompat.setTint(removeButton.getDrawable(),
            theme == RuqusTheme.LIGHT ? Ruqus.DARK_TEXT_COLOR : Ruqus.LIGHT_TEXT_COLOR);
    removeButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            removeSortField(idx);
        }
    });
    // Set up radio group.
    RadioGroup sortDirRg = (RadioGroup) sortPart.findViewById(R.id.rg_sort_dir);
    // Generate unique view IDs for the spinner, button, and radio group and set them.
    int fieldSpinnerId = Util.getUniqueViewId();
    int removeButtonId = Util.getUniqueViewId();
    int sortDirRgId = Util.getUniqueViewId();
    fieldSpinner.setId(fieldSpinnerId);
    removeButton.setId(removeButtonId);
    sortDirRg.setId(sortDirRgId);
    // Add IDs to lists.
    sortSpinnerIds.add(fieldSpinnerId);
    removeSortBtnIds.add(removeButtonId);
    sortDirRgIds.add(sortDirRgId);
    // If that was our third sort field, we disable the button that adds them.
    if (sortSpinnerIds.size() == 3) addSortField.setEnabled(false);
    // Add this to the builder container. (The original "(Add one, since we want it added after the header
    // view)" remark didn't match the code — the view is simply appended.)
    builderParts.addView(sortPart);
    // Fill this sort field layout's views in if necessary.
    if (selectedFieldPos != -1) {
        // Select the correct item in the spinner. TODO do we need to manually update the radio buttons' text?
        fieldSpinner.setSelection(selectedFieldPos);
        // Select the correct radio button.
        sortDirRg.check(sortDir == Sort.ASCENDING ? R.id.asc : R.id.desc);
    }
}

/**
 * Called when a remove sort field button is clicked.
 * NOTE(review): listeners on rows after the removed one captured their original idx; after removal those
 * captured values point one past their row's new position — verify this is handled elsewhere.
 */
private void removeSortField(int index) {
    // Remove IDs from lists.
    sortSpinnerIds.remove(index);
    removeSortBtnIds.remove(index);
    sortDirRgIds.remove(index);
    // Remove from builder container. (The original "(Add one to the index...)" remark didn't match the
    // code — index is used as-is.)
    builderParts.removeViewAt(index);
    // Enable add button.
    addSortField.setEnabled(true);
}

/**
 * Called when the selection on a sort field spinner is changed. Updates the ascending/descending radio button
 * labels to the field type's "pretty" sort strings and shows the radio group.
 */
private void setSortDirOptions(int index, String visibleFieldName) {
    String[] pretty = Ruqus.typeEnumForField(currClassName, Ruqus.fieldFromVisibleField(
            currClassName, visibleFieldName)).getPrettySortStrings();
    RadioGroup rg = (RadioGroup) builderParts.findViewById(sortDirRgIds.get(index));
    ((RadioButton) rg.findViewById(R.id.asc)).setText(pretty[0]);
    ((RadioButton) rg.findViewById(R.id.desc)).setText(pretty[1]);
    rg.setVisibility(VISIBLE);
}

/* State persistence. */

/**
 * Helps us easily save and restore our view's state.
 */
static class SavedState extends BaseSavedState {
    // General variables. Will always be written/read.
    RuqusTheme theme;
    RealmUserQuery ruq;
    Mode mode;
    String currClassName;
    ArrayList<String> currVisibleFlatFieldNames;
    // Condition builder variables. Will only be written/read if we're in condition builder mode.
    int currPartIdx;
    String currFieldName;
    FieldType currFieldType;
    String currTransName;
    ArrayList<Integer> argViewIds;
    // Sort builder variables. Will only be written/read if we're in sort builder mode.
    ArrayList<Integer> sortSpinnerIds;
    ArrayList<Integer> removeSortBtnIds;
    ArrayList<Integer> sortDirRgIds;

    public SavedState(Parcelable superState) {
        super(superState);
    }

    // Read order here must exactly mirror the write order in writeToParcel().
    private SavedState(Parcel in) {
        super(in);
        // Read general variables' values back. Enums are stored as ordinals, -1 meaning null.
        int tmpTheme = in.readInt();
        this.theme = tmpTheme == -1 ? null : RuqusTheme.values()[tmpTheme];
        this.ruq = in.readParcelable(RealmUserQuery.class.getClassLoader());
        int tmpMode = in.readInt();
        this.mode = tmpMode == -1 ? null : Mode.values()[tmpMode];
        this.currClassName = in.readString();
        this.currVisibleFlatFieldNames = in.createStringArrayList();
        if (this.mode == Mode.C_BUILD) {
            // If we were in condition builder mode, read those variables' values back.
            this.currPartIdx = in.readInt();
            this.currFieldName = in.readString();
            int tmpCurrFieldType = in.readInt();
            this.currFieldType = tmpCurrFieldType == -1 ? null : FieldType.values()[tmpCurrFieldType];
            this.currTransName = in.readString();
            this.argViewIds = new ArrayList<>();
            in.readList(this.argViewIds, Integer.class.getClassLoader());
        } else if (this.mode == Mode.S_BUILD) {
            // If we were in sort builder mode, read those variables' values back.
            this.sortSpinnerIds = new ArrayList<>();
            in.readList(this.sortSpinnerIds, Integer.class.getClassLoader());
            this.removeSortBtnIds = new ArrayList<>();
            in.readList(this.removeSortBtnIds, Integer.class.getClassLoader());
            this.sortDirRgIds = new ArrayList<>();
            in.readList(this.sortDirRgIds, Integer.class.getClassLoader());
        }
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel out, int flags) {
        super.writeToParcel(out, flags);
        // Write general variables' values.
        out.writeInt(this.theme == null ? -1 : this.theme.ordinal());
        out.writeParcelable(this.ruq, flags);
        out.writeInt(this.mode == null ? -1 : this.mode.ordinal());
        out.writeString(this.currClassName);
        out.writeStringList(this.currVisibleFlatFieldNames);
        if (this.mode == Mode.C_BUILD) {
            // If we're in condition builder mode, write those variables' values.
            out.writeInt(this.currPartIdx);
            out.writeString(this.currFieldName);
            out.writeInt(this.currFieldType == null ? -1 : this.currFieldType.ordinal());
            out.writeString(this.currTransName);
            out.writeList(this.argViewIds);
        } else if (this.mode == Mode.S_BUILD) {
            // If we're in sort builder mode, write those variables' values.
            out.writeList(this.sortSpinnerIds);
            out.writeList(this.removeSortBtnIds);
            out.writeList(this.sortDirRgIds);
        }
    }

    public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() {
        @Override
        public SavedState createFromParcel(Parcel in) {return new SavedState(in);}

        @Override
        public SavedState[] newArray(int size) {return new SavedState[size];}
    };
}
}
package ch.booyakasha.engine;

import java.awt.Canvas;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.image.BufferStrategy;
import java.util.ArrayList;
import java.util.Random;
import javax.swing.JFrame;
import javax.swing.JPanel;
import ch.booyakasha.engine.GameKeyInputHandler.GameKeyInformation;

/**
 * Game logic, including game loop.
 */
public class Game extends Canvas implements IGame {

	private static final long serialVersionUID = 1L;

	/** Width in pixels of the buildings drawn on both edges of the street. */
	private static final int BUILDING_WIDTH = 50;

	/** Cached instance of the current configuration */
	private Configuration config;
	/** Double-buffering strategy used for accelerated rendering. */
	private BufferStrategy strategy;
	/** Loop flag; the game loop keeps running while this is true. */
	private boolean gameRunning = true;
	/** All active entities: backgrounds, player and enemies. */
	private ArrayList<Entity> entities = new ArrayList<Entity>();
	private Entity player;
	private Entity background1;
	private Entity background2;
	private Random rand = new Random();
	private GameKeyInputHandler gameKeyInputHandler;
	/** Wall-clock time at which the current game was started. */
	private long startTime;
	//private MouseInput mouseInput;
	/** True if game logic needs to be applied this loop, normally as a result of a game event */
	private boolean logicRequiredThisLoop = false;

	/**
	 * Construct our game and set it running.
	 */
	public Game() {
		config = Configuration.getCurrent();

		// TODO: Switch to AWT components, mixing SWING and AWT was not a good idea
		JFrame container = new JFrame("Booyakasha");
		JPanel panel = (JPanel)container.getContentPane();
		panel.setPreferredSize(new Dimension(config.screenWidth, config.screenHeight));
		//panel.setLayout(null);

		setSize(config.screenWidth, config.screenHeight);
		panel.add(this);

		/*
		mouseInput = new MouseInput();
		addMouseListener(mouseInput);
		addMouseMotionListener(mouseInput);
		*/

		// Tell AWT not to bother repainting our canvas since we're going to do that our self in accelerated mode
		setIgnoreRepaint(true);

		container.pack();
		container.setResizable(false);
		container.setVisible(true);
		container.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

		// Add key input handler
		gameKeyInputHandler = new GameKeyInputHandler();
		addKeyListener(gameKeyInputHandler);
		requestFocus();

		// Create the buffering strategy which will allow AWT to manage our accelerated graphics
		createBufferStrategy(2);
		strategy = getBufferStrategy();

		startGame();
	}

	/**
	 * Starts a new game: clears all entities, recreates them and resets the clock.
	 */
	private void startGame() {
		entities.clear();
		initEntities();
		startTime = System.currentTimeMillis();
	}

	/**
	 * Creates the initial entity set: two vertically stacked background tiles
	 * (so they can scroll seamlessly) and the player sprite.
	 */
	private void initEntities() {
		// Create background
		background1 = new BackgroundEntity(this, "/sprites/background.png", 0, -(4000 - config.screenHeight));
		entities.add(background1);
		background2 = new BackgroundEntity(this, "/sprites/background.png", 0, -(4000 + 4000 - config.screenHeight));
		entities.add(background2);

		// Create the player object
		player = new AliGEntity(this, "/sprites/alig.gif", 370, 500);
		entities.add(player);

		// Create enemies
		/*
		for(int row = 0; row < 5; row++) {
			for(int x = 0; x < 12; x++) {
				Entity enemy = new EnemyEntity(this, "/sprites/enemy.gif", 100 + (x * 50), 50 + row * 36);
				entities.add(enemy);
			}
		}
		*/
	}

	/**
	 * Spawns one enemy per started second of {@code deltaSpawn}, at a random
	 * horizontal position inside the configured padding, just above the
	 * visible area (y = -50).
	 *
	 * @param deltaSpawn milliseconds elapsed since the last spawn
	 */
	private void spawnEnemies(long deltaSpawn) {
		for(int i = 0; i <= deltaSpawn / 1000; i++) {
			Entity enemy = new EnemyEntity(this, "/sprites/enemy.png",
					rand.nextInt(config.screenWidth - 2*config.horizontalPadding) + config.horizontalPadding, -50);
			entities.add(enemy);
		}
	}

	/**
	 * Notification from a game entity that the logic of the game should be run
	 */
	public void updateLogic() {
		logicRequiredThisLoop = true;
	}

	/**
	 * Main game loop: moves and draws all entities, spawns enemies, applies
	 * deferred game logic and translates keyboard state into player movement.
	 * Runs on the calling thread until {@code gameRunning} becomes false.
	 */
	public void gameLoop() {
		long lastLoopTime = System.currentTimeMillis();
		long lastSpawnTime = lastLoopTime;

		// Keep looping round until the game ends
		while(gameRunning) {
			long delta = System.currentTimeMillis() - lastLoopTime;
			lastLoopTime = System.currentTimeMillis();

			// Get graphic context
			Graphics2D g = (Graphics2D)strategy.getDrawGraphics();

			// Draw street. Use the configured screen size instead of the
			// previous hard-coded 800x600 so drawing stays consistent with
			// the window size set up in the constructor.
			g.setColor(new Color(168, 168, 168));
			g.fillRect(0, 0, config.screenWidth, config.screenHeight);

			// Draw buildings on both sides
			g.setColor(new Color(139, 69, 19));
			g.fillRect(0, 0, BUILDING_WIDTH, config.screenHeight);
			g.fillRect(config.screenWidth - BUILDING_WIDTH, 0, BUILDING_WIDTH, config.screenHeight);

			// Let entities move
			for(int i = 0; i < entities.size(); i++) {
				Entity entity = entities.get(i);
				entity.move(delta);
			}

			// Spawn enemies once more than a full second has accumulated
			long deltaSpawn = (lastLoopTime - lastSpawnTime);
			if(deltaSpawn / 1000 > 1) {
				spawnEnemies(deltaSpawn);
				lastSpawnTime = lastLoopTime;
			}

			// Draw all entities
			for(int i = 0; i < entities.size(); i++) {
				Entity entity = entities.get(i);
				entity.draw(g);
			}

			if(logicRequiredThisLoop) {
				for (int i=0;i<entities.size();i++) {
					Entity entity = entities.get(i);
					entity.doLogic();
				}
				logicRequiredThisLoop = false;
			}

			// Draw mouse
			/*
			Point p = mouseInput.getCurrent();
			if(p != null) {
				g.setColor(Color.GREEN);
				g.drawOval(p.x - 20, p.y - 20, 40, 40);
				g.drawRect(p.x - 5, p.y - 5, 10, 10);
			}
			*/

			g.dispose();
			strategy.show();

			GameKeyInformation keyInfo = gameKeyInputHandler.getKeyInformation();
			double playerVelocity = 0;
			if(keyInfo.isLeftPressed() && !keyInfo.isRightPressed()) {
				// Set left velocity
				playerVelocity = -config.playerVelocity;
			}
			else if(!keyInfo.isLeftPressed() && keyInfo.isRightPressed()) {
				// Set right velocity
				playerVelocity = config.playerVelocity;
			}
			player.setHorizontalMovement(playerVelocity);

			// Pause for 10ms --> 100 fps. Catch only InterruptedException and
			// restore the interrupt flag instead of silently swallowing every
			// exception as the previous "catch (Exception e) {}" did.
			try {
				Thread.sleep(10);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
			}
		}
	}

	/**
	 * Application entry point: installs a default configuration, builds the
	 * game window and runs the game loop on the calling thread.
	 */
	public static void main(String[] argv) {
		Configuration.setCurrent(new Configuration());
		Game game = new Game();
		game.gameLoop();
	}
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package info.hska.erstiduell.view;

import info.hska.erstiduell.Config;
import info.hska.erstiduell.Game;
import info.hska.erstiduell.buzzer.Key;
import info.hska.erstiduell.questions.QuestionLibrary;
import java.awt.Color;
import java.awt.GraphicsDevice;
import java.awt.event.FocusEvent;
import java.awt.event.KeyEvent;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.JToggleButton;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.filechooser.FileFilter;

/**
 * Pre-game configuration dialog: lets the user load a question library,
 * choose the number of teams, pick text/background colors, select the output
 * monitor and assign a buzzer key per team. When every setting is valid the
 * "Start Game" button hands a {@link Config} to the owning {@link Game}.
 *
 * @author timroes
 */
public final class ConfigWindow extends javax.swing.JFrame {

    private Game game;
    // One hotkey slot per possible team (max 4); pre-filled with defaults below.
    private Key[] hotkeys = new Key[4];

    /** Creates new form ConfigWindow2 */
    public ConfigWindow(Game game) {
        this.game = game;
        // Default buzzer keys: U, I, A, E (key code, modifier, display text).
        hotkeys[0] = new Key(85, 1, "U");
        hotkeys[1] = new Key(73, 1, "I");
        hotkeys[2] = new Key(65, 1, "A");
        hotkeys[3] = new Key(69, 1, "E");
        initComponents();
        updateErrors();
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;

        jToggleButton1 = new javax.swing.JToggleButton();
        error = new javax.swing.JLabel();
        questionsPanel = new javax.swing.JPanel();
        loadQuestions = new javax.swing.JButton();
        questionFile = new javax.swing.JTextField();
        questionLabel = new javax.swing.JLabel();
        gamePanel = new javax.swing.JPanel();
        jLabel1 = new javax.swing.JLabel();
        amountPlayers = new javax.swing.JSpinner();
        colorPanel = new javax.swing.JPanel();
        jLabel2 = new javax.swing.JLabel();
        fR = new javax.swing.JSpinner();
        fG = new javax.swing.JSpinner();
        fB = new javax.swing.JSpinner();
        jLabel3 = new javax.swing.JLabel();
        bR = new javax.swing.JSpinner();
        bG = new javax.swing.JSpinner();
        bB = new javax.swing.JSpinner();
        outputPanel = new javax.swing.JPanel();
        jLabel4 = new javax.swing.JLabel();
        outputMonitor = new javax.swing.JComboBox();
        inputSettings = new javax.swing.JPanel();
        jLabel5 = new javax.swing.JLabel();
        input1 = new javax.swing.JToggleButton();
        jLabel6 = new javax.swing.JLabel();
        jLabel7 = new javax.swing.JLabel();
        jLabel8 = new javax.swing.JLabel();
        input2 = new javax.swing.JToggleButton();
        input3 = new javax.swing.JToggleButton();
        input4 = new javax.swing.JToggleButton();
        ok = new javax.swing.JButton();

        jToggleButton1.setText("jToggleButton1");

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        getContentPane().setLayout(new java.awt.GridBagLayout());

        error.setFont(new java.awt.Font("Liberation Sans", 1, 15));
        error.setForeground(new java.awt.Color(255, 0, 0));
        error.setText("jLabel5");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(10, 10, 5, 10);
        getContentPane().add(error, gridBagConstraints);

        questionsPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Question Library"));
        questionsPanel.setLayout(new java.awt.GridBagLayout());

        loadQuestions.setText("Load Questions");
        loadQuestions.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                loadQuestionsActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        questionsPanel.add(loadQuestions, gridBagConstraints);

        questionFile.setText("[No file loaded]");
        questionFile.setEnabled(false);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        questionsPanel.add(questionFile, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        questionsPanel.add(questionLabel, gridBagConstraints);

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
        getContentPane().add(questionsPanel, gridBagConstraints);

        gamePanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Game Settings"));
        gamePanel.setLayout(new java.awt.GridBagLayout());

        jLabel1.setText("Number of Teams:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        gamePanel.add(jLabel1, gridBagConstraints);

        // 2 to 4 teams; enable/disable the key-assignment buttons for teams
        // 3 and 4 whenever the team count changes.
        amountPlayers.setModel(new javax.swing.SpinnerNumberModel(2, 2, 4, 1));
        amountPlayers.addChangeListener(new ChangeListener()
        {
            public void stateChanged(ChangeEvent e) {
                if((Integer)amountPlayers.getValue() < 4)
                    input4.setEnabled(false);
                else
                    input4.setEnabled(true);
                if((Integer)amountPlayers.getValue() < 3)
                    input3.setEnabled(false);
                else
                    input3.setEnabled(true);
                updateErrors();
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        gamePanel.add(amountPlayers, gridBagConstraints);

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
        getContentPane().add(gamePanel, gridBagConstraints);

        colorPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Color Settings"));
        colorPanel.setLayout(new java.awt.GridBagLayout());

        jLabel2.setText("Text color:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(jLabel2, gridBagConstraints);

        // fR/fG/fB: foreground (text) color components, default white (255,255,255).
        fR.setModel(new javax.swing.SpinnerNumberModel(255, 0, 255, 1));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(fR, gridBagConstraints);

        fG.setModel(new javax.swing.SpinnerNumberModel(255, 0, 255, 1));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(fG, gridBagConstraints);

        fB.setModel(new javax.swing.SpinnerNumberModel(255, 0, 255, 1));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(fB, gridBagConstraints);

        jLabel3.setText("Backgroundcolor:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(jLabel3, gridBagConstraints);

        // bR/bG/bB: background color components, default black (0,0,0).
        bR.setModel(new javax.swing.SpinnerNumberModel(0, 0, 255, 1));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(bR, gridBagConstraints);

        bG.setModel(new javax.swing.SpinnerNumberModel(0, 0, 255, 1));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(bG, gridBagConstraints);

        bB.setModel(new javax.swing.SpinnerNumberModel(0, 0, 255, 1));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        colorPanel.add(bB, gridBagConstraints);

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
        getContentPane().add(colorPanel, gridBagConstraints);

        outputPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Output Settings"));
        outputPanel.setLayout(new java.awt.GridBagLayout());

        jLabel4.setText("Output Monitor:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        outputPanel.add(jLabel4, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        outputPanel.add(outputMonitor, gridBagConstraints);

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 4;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
        getContentPane().add(outputPanel, gridBagConstraints);

        inputSettings.setBorder(javax.swing.BorderFactory.createTitledBorder("Input Settings"));
        inputSettings.setLayout(new java.awt.GridBagLayout());

        // One label/toggle-button pair per possible team.
        teamInitialize(jLabel5, input1, 1);
        teamInitialize(jLabel6, input2, 2);
        teamInitialize(jLabel7, input3, 3);
        teamInitialize(jLabel8, input4, 4);

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 5;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
        getContentPane().add(inputSettings, gridBagConstraints);

        ok.setText("Start Game");
        ok.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                okActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 6;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(5, 5, 5, 5);
        getContentPane().add(ok, gridBagConstraints);

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Wires up one team row in the input settings panel: sets the label text,
     * shows the currently assigned key (or a placeholder), disables the button
     * for teams beyond the current team count, and installs the action, focus
     * and key listeners used for key assignment.
     *
     * @param label  label to show the team name on
     * @param input  toggle button used to capture that team's buzzer key
     * @param teamNo 1-based team number
     */
    void teamInitialize(javax.swing.JLabel label, JToggleButton input, final int teamNo) {
        label.setText("Team " + teamNo );
        java.awt.GridBagConstraints gridBagConstraints = new java.awt.GridBagConstraints();
        if (hotkeys[teamNo - 1] != null) {
            input.setText(hotkeys[teamNo - 1].toString());
        }else {
            input.setText("[Assign key]");
        }
        if (((Integer) amountPlayers.getValue()) < teamNo) {
            input.setEnabled(false);
        }
        input.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                inputActionPerformed(evt);
            }
        });
        input.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusLost(java.awt.event.FocusEvent evt) {
                inputFocusLost(evt, teamNo);
            }
        });
        input.addKeyListener(new java.awt.event.KeyAdapter() {
            public void keyPressed(java.awt.event.KeyEvent evt) {
                inputKeyPressed(evt, teamNo);
            }
        });
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = teamNo - 1;
        gridBagConstraints.insets = new java.awt.Insets(2, 5, 2, 5);
        inputSettings.add(input, gridBagConstraints);
    }

    // Opens a file chooser filtered to .qes files, loads the selected question
    // library and updates the file/count labels; shows an error dialog on failure.
    private void loadQuestionsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_loadQuestionsActionPerformed
        JFileChooser fc = new JFileChooser();
        File workingDirectory = new File(System.getProperty("user.dir"));
        fc.setCurrentDirectory(workingDirectory);
        fc.setFileFilter(new FileFilter() {

            @Override
            public boolean accept(File f) {
                return f.getName().endsWith(".qes") || f.isDirectory();
            }

            @Override
            public String getDescription() {
                return "Question Library (.qes)";
            }
        });
        fc.showDialog(this, "Load");
        if (fc.getSelectedFile() == null)
            return;
        try {
            QuestionLibrary.loadQuestions(fc.getSelectedFile());
        } catch (Exception ex) {
            for(StackTraceElement el : ex.getStackTrace())
                System.out.println(el.getFileName() + " " + el.getMethodName() + ":" + el.getLineNumber());
            JOptionPane.showMessageDialog(this,
                    "Could not read question library correctly:\n\n"
                    + "[" + ex.getClass().getSimpleName() + "]\n"
                    + ((ex.getMessage() == null) ? "" : ex.getMessage()),
                    "Error", JOptionPane.ERROR_MESSAGE);
            return;
        }
        questionLabel.setText(String.valueOf(QuestionLibrary.getInstance()
                .getQuestionAmount()) + " questions");
        questionFile.setText(fc.getSelectedFile().getAbsolutePath());
        updateErrors();
    }//GEN-LAST:event_loadQuestionsActionPerformed

    // "Start Game": if no validation errors are shown, builds a Config from
    // the current widget values (hotkeys trimmed to the chosen team count),
    // hands it to the Game and closes this window.
    private void okActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okActionPerformed
        if(!error.isVisible()) {
            Key[] hotkeys = new Key[(Integer)amountPlayers.getValue()];
            System.arraycopy(this.hotkeys, 0, hotkeys, 0, hotkeys.length);
            game.configured(new Config(
                    (Integer)amountPlayers.getValue(),
                    (GraphicsDevice)outputMonitor.getSelectedItem(),
                    new Color((Integer)fR.getValue(), (Integer)fG.getValue(), (Integer)fB.getValue()),
                    new Color((Integer)bR.getValue(), (Integer)bG.getValue(), (Integer)bB.getValue()),
                    hotkeys
                    ));
            setVisible(false);
            dispose();
        }
    }//GEN-LAST:event_okActionPerformed

    // Clicking a key button arms it: the next key press (see inputKeyPressed)
    // becomes that team's hotkey.
    private void inputActionPerformed(java.awt.event.ActionEvent evt) {
        JToggleButton input = (JToggleButton)evt.getSource();
        input.setSelected(true);
        input.setText("[Press key now]");
    }

    // Captures the pressed key as the team's hotkey while the button is armed.
    private void inputKeyPressed(KeyEvent evt, int player) {
        JToggleButton input = (JToggleButton)evt.getSource();
        if(input.isSelected()) {
            hotkeys[player - 1] = new Key(evt);
            input.setText(hotkeys[player - 1].toString());
            input.setSelected(false);
            updateErrors();
        }
    }

    // Disarms the button when focus leaves it and restores its label.
    private void inputFocusLost(FocusEvent evt, int player) {
        JToggleButton bt = ((JToggleButton)evt.getSource());
        bt.setSelected(false);
        if(hotkeys[player - 1] == null) {
            bt.setText("[Assign key]");
        } else {
            bt.setText(hotkeys[player - 1].toString());
        }
    }

    /**
     * Re-validates the whole form: a question library must be loaded, every
     * active team needs a hotkey, and hotkeys must be pairwise distinct.
     * Disables the start button and shows an HTML error label when invalid.
     * NOTE(review): uses a raw {@code Set}/{@code HashSet} (should be
     * {@code Set<Key>}) and the built HTML never closes its
     * {@code <html><body>} tags — Swing tolerates both, but worth cleaning up.
     */
    private void
    updateErrors() {
        ok.setEnabled(true);
        StringBuilder str = new StringBuilder("<html><body>Errors:");
        if(QuestionLibrary.getInstance() == null) {
            ok.setEnabled(false);
            str.append("<br>You must load a question library.");
        }
        int players = (Integer)amountPlayers.getValue();
        if(hotkeys[0] == null || hotkeys[1] == null
                || (players >= 3 && hotkeys[2] == null)
                || (players >= 4 && hotkeys[3] == null)) {
            str.append("<br>You must assign a key for every player.");
            ok.setEnabled(false);
        }
        Set set = new HashSet();
        for(Key k : hotkeys) {
            if(k != null)
                if(set.contains(k)) {
                    ok.setEnabled(false);
                    str.append("<br>Each player must have a different key.");
                    break;
                } else {
                    set.add(k);
                }
        }
        if(!ok.isEnabled()) {
            error.setText(str.toString());
            error.setVisible(true);
        } else {
            error.setVisible(false);
        }
    }

    /**
     * Fills the output-monitor combo box with the available graphics devices.
     *
     * @param devices monitors to offer as render targets
     */
    public void setMonitors(GraphicsDevice[] devices) {
        outputMonitor.removeAllItems();
        for(GraphicsDevice d : devices) {
            outputMonitor.addItem(d);
        }
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JSpinner amountPlayers;
    private javax.swing.JSpinner bB;
    private javax.swing.JSpinner bG;
    private javax.swing.JSpinner bR;
    private javax.swing.JPanel colorPanel;
    private javax.swing.JLabel error;
    private javax.swing.JSpinner fB;
    private javax.swing.JSpinner fG;
    private javax.swing.JSpinner fR;
    private javax.swing.JPanel gamePanel;
    private javax.swing.JToggleButton input1;
    private javax.swing.JToggleButton input2;
    private javax.swing.JToggleButton input3;
    private javax.swing.JToggleButton input4;
    private javax.swing.JPanel inputSettings;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JToggleButton jToggleButton1;
    private javax.swing.JButton loadQuestions;
    private javax.swing.JButton ok;
    private javax.swing.JComboBox outputMonitor;
    private javax.swing.JPanel outputPanel;
    private javax.swing.JTextField questionFile;
    private javax.swing.JLabel questionLabel;
    private javax.swing.JPanel questionsPanel;
    // End of variables declaration//GEN-END:variables
}
package com.apigee.utils;

import org.w3c.dom.*;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.*;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.*;
import java.util.*;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

// Utility helpers for reading, writing and transforming XML documents, used
// by the surrounding WSDL/XSLT generation code.
public class XMLUtils {

    private static final Logger LOGGER = Logger.getLogger(XMLUtils.class.getName());
    private static final ConsoleHandler handler = new ConsoleHandler();

    static {
        LOGGER.setLevel(Level.WARNING); // PUBLISH this level
        handler.setLevel(Level.WARNING);
        LOGGER.addHandler(handler);
    }

    // Shared (non-namespace-aware) builder used by most parse helpers.
    private DocumentBuilder builder;

    // Namespace URIs that must NOT be copied onto generated XSLT stylesheets.
    // NOTE(review): the first two string literals below are truncated in this
    // source (they read `"http: "http:`) and will not compile as-is —
    // presumably they were SOAP/WSDL namespace URIs; recover them from
    // version control before shipping.
    private static final Set<String> skiplist = new HashSet<String>(
            Arrays.asList(new String[] { "http: "http: "http://schemas.xmlsoap.org/soap/encoding/" }));

    // XPath fragment template; not referenced in the visible portion of this class.
    private static String elementName = ":{local-name()}";

    public XMLUtils() throws Exception {
        builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
    }

    /**
     * Returns true when the given string parses as namespace-aware,
     * validating XML; all parser warnings/errors are deliberately silenced
     * so the result is a simple boolean.
     */
    public boolean isValidXML(String xml) {
        try {
            DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
            documentBuilderFactory.setNamespaceAware(true);
            documentBuilderFactory.setValidating(true);
            DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
            documentBuilder.setErrorHandler(new ErrorHandler() {
                @Override
                public void warning(SAXParseException exception) throws SAXException {
                }

                @Override
                public void fatalError(SAXParseException exception) throws SAXException {
                }

                @Override
                public void error(SAXParseException exception) throws SAXException {
                }
            });
            documentBuilder.parse(new InputSource(new StringReader(xml)));
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * Parses an XML document, first trying {@code resource} as a classpath
     * resource and falling back to treating it as a file path.
     *
     * @param resource classpath resource name or file path
     * @return the parsed DOM document
     * @throws Exception on any parse or I/O failure (logged before rethrow)
     */
    public Document readXML(String resource) throws Exception {
        LOGGER.entering(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
        try {
            try {
                //first attempt to read as resource; otherwise, it must be a file
                return builder.parse(getClass().getResourceAsStream(resource));
            } catch (IllegalArgumentException npe) {
                return builder.parse(new File(resource));
            }
        } catch (SAXParseException spe) {
            // Error generated by the parser
            LOGGER.severe("\n** Parsing error" + ", line " + spe.getLineNumber() + ", uri " + spe.getSystemId());
            LOGGER.severe(" " + spe.getMessage());
            throw spe;
        } catch (SAXException sxe) {
            LOGGER.severe(sxe.getMessage());
            throw sxe;
        } catch (IOException ioe) {
            LOGGER.severe(ioe.getMessage());
            throw ioe;
        } catch (Exception e) {
            LOGGER.severe(e.getMessage());
            throw e;
        }
    }

    /**
     * Serializes a DOM document to {@code filePath} as standalone, indented
     * UTF-8 XML.
     *
     * @param document document to write
     * @param filePath destination file path
     * @throws Exception on I/O or transformer failure (logged before rethrow)
     */
    public void writeXML(Document document, String filePath) throws Exception {
        LOGGER.entering(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
        try {
            document.setXmlStandalone(true);
            // Use a Transformer for output
            TransformerFactory tFactory = TransformerFactory.newInstance();
            Transformer transformer = tFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
            transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
            transformer.setOutputProperty(OutputKeys.VERSION, "1.0");
            transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            DOMSource source = new DOMSource(document);
            File f = new File(filePath);
            // NOTE(review): fos is only closed on the success path; a failing
            // transform leaks the stream — consider try-with-resources.
            FileOutputStream fos = new FileOutputStream(f, false);
            StreamResult result = new StreamResult(fos);
            transformer.transform(source, result);
            fos.close();
        } catch (IOException ioe) {
            LOGGER.severe(ioe.getMessage());
            throw ioe;
        } catch (TransformerConfigurationException tce) {
            LOGGER.severe("* Transformer Factory error");
            LOGGER.severe(" " + tce.getMessage());
            throw tce;
        } catch (TransformerException te) {
            LOGGER.severe("* Transformation error");
            LOGGER.severe(" " + te.getMessage());
            throw te;
        }
    }

    /**
     * Writes an XSLT file "&lt;operationName&gt;-add-other-namespaces.xslt"
     * built from {@code xsltTemplate}, declaring every namespace in
     * {@code namespace} on the stylesheet root except those in the skiplist.
     * An empty prefix is emitted as "xmlns:ns".
     */
    public void generateOtherNamespacesXSLT(String filePath, String operationName, String xsltTemplate,
            Map<String, String> namespace) throws Exception {
        LOGGER.entering(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        docBuilderFactory.setNamespaceAware(true);
        DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
        Document document = docBuilder.parse(new InputSource(new StringReader(xsltTemplate)));
        Node stylesheet = document.getDocumentElement();
        for (Map.Entry<String, String> entry : namespace.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (!skiplist.contains(value)) {
                if (key.length() == 0) {
                    ((Element) stylesheet).setAttribute("xmlns:ns", value);
                } else {
                    ((Element) stylesheet).setAttribute("xmlns:" + key, value);
                }
            }
        }
        writeXML(document, filePath + operationName + "-add-other-namespaces.xslt");
        LOGGER.exiting(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
    }

    /**
     * Parses {@code xml} and returns a detached clone of the resulting
     * document (see {@link #cloneDocument}).
     */
    public Document getXMLFromString(String xml) throws Exception {
        LOGGER.entering(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
        try {
            InputSource is = new InputSource(new StringReader(xml));
            Document document = builder.parse(is);
            return cloneDocument(document);
        } catch (SAXException | IOException e) {
            LOGGER.severe(e.getMessage());
            throw e;
        } catch (Exception e) {
            LOGGER.severe(e.getMessage());
            throw e;
        }
    }

    // public Document getXMLFromJSONString(String jsonString) throws Exception {
    // JsonObject json = new JsonParser().parse(jsonString).getAsJsonObject();
    // return getXMLFromString(XML.toString(json));

    // Strips a namespace prefix: "ns:name" -> "name"; names without a colon
    // are returned unchanged.
    private String extractElement(String fullElementName) {
        if (fullElementName.indexOf(":") != -1) {
            String elements[] = fullElementName.split(":");
            return elements[1];
        } else {
            return fullElementName;
        }
    }

    public List<String> getElementList(String xml) throws Exception {
        LOGGER.entering(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
        List<String> elementList = new ArrayList<String>();
        try {
            Document doc = builder.parse(new InputSource(new StringReader(xml)));
            XPathFactory xpf = XPathFactory.newInstance();
            XPath xp = xpf.newXPath();
    // NOTE(review): getElementList is truncated here in this source — the
    // rest of its try block, catch clauses and closing braces are missing,
    // so the class will not compile until the method body is restored.

    /**
     * Gets the first child element of a node.
     *
     * @param node
     *            the node to get the child from
     * @return the first element child of {@code node} or {@code null} if none
     * @throws NullPointerException
     *             if {@code node} is {@code null}
     */
    public Element getFirstChildElement(Node node) throws Exception {
        LOGGER.entering(XMLUtils.class.getName(), new Object() {
        }.getClass().getEnclosingMethod().getName());
        node = node.getFirstChild();
        while (node != null && node.getNodeType() != Node.ELEMENT_NODE) {
            node = node.getNextSibling();
        }
        return (Element) node;
    }

    /**
     * Returns a new document whose root is a deep import of {@code doc}'s
     * root element, detaching the content from the original document.
     *
     * @param doc document to clone
     * @return the cloned document
     * @throws Exception propagated from the DOM operations
     */
    public Document cloneDocument(Document doc) throws Exception {
        Document clonedDoc = builder.newDocument();
        clonedDoc.appendChild(clonedDoc.importNode(doc.getDocumentElement(), true));
        return clonedDoc;
    }

    /**
     * Writes an XSLT file "&lt;operationName&gt;-add-namespace.xslt" from the
     * classpath template {@code xsltTemplate}, substituting @@ROOT/@@PREFIX/
     * @@NAMESPACE placeholders and copying the non-skiplisted namespaces onto
     * the stylesheet root. When {@code rootElement} is null the operation name
     * is used as root.
     * NOTE(review): the Scanner below is never closed, leaking the resource
     * stream; wrap it in try-with-resources.
     */
    public void generateRootNamespaceXSLT(String xsltTemplate, String target, String operationName, String prefix,
            String rootElement, String namespaceUri, Map<String, String> namespace) throws Exception {
        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        docBuilderFactory.setNamespaceAware(true);
        DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
        if (rootElement == null) {
            rootElement = operationName;
        }
        String newXsltTemplate = new Scanner(getClass()
                .getResourceAsStream(xsltTemplate), "UTF-8")
                .useDelimiter("\\A").next()
                .replaceAll("@@ROOT", rootElement)
                .replaceAll("@@PREFIX", prefix)
                .replaceAll("@@NAMESPACE", namespaceUri);
        Document document = docBuilder.parse(new InputSource(new StringReader(newXsltTemplate)));
        Node stylesheet = document.getDocumentElement();
        for (Map.Entry<String, String> entry : namespace.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (!skiplist.contains(value)) {
                if (key.length() == 0) {
                    ((Element) stylesheet).setAttribute("xmlns:ns", value);
                } else {
                    ((Element) stylesheet).setAttribute("xmlns:" + key, value);
                }
            }
        }
        XPathFactory xpf = XPathFactory.newInstance();
        XPath xp = xpf.newXPath();
        // there's no default implementation for NamespaceContext
        xp.setNamespaceContext(new NamespaceContext() {
            @Override
            public Iterator<String> getPrefixes(String namespaceURI) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String getPrefix(String namespaceURI) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String getNamespaceURI(String prefix) {
                if (prefix == null)
                    throw new NullPointerException("Null prefix");
                // NOTE(review): the literal below is truncated in this source
                // (`return "http:`); for the "xsl" prefix it should be the
                // XSLT namespace "http://www.w3.org/1999/XSL/Transform".
                else if ("xsl".equals(prefix))
                    return "http: else if ("xml".equals(prefix)) return XMLConstants.XML_NS_URI; return XMLConstants.NULL_NS_URI; }
        });
        writeXML(document, target + operationName + "-add-namespace.xslt");
    }

    /**
     * Builds an absolute XPath expression locating {@code n} in its document,
     * preferring id/name attributes and falling back to 1-based sibling
     * indices. Returns {@code null} for a null input; only element, attribute
     * and document nodes are accepted.
     */
    private String getFullXPath(Node n) {
        // abort early
        if (null == n)
            return null;
        // declarations
        Node parent = null;
        Stack<Node> hierarchy = new Stack<Node>();
        StringBuffer buffer = new StringBuffer();
        // push element on stack
        hierarchy.push(n);
        switch (n.getNodeType()) {
        case Node.ATTRIBUTE_NODE:
            parent = ((Attr) n).getOwnerElement();
            break;
        case Node.ELEMENT_NODE:
            parent = n.getParentNode();
            break;
        case Node.DOCUMENT_NODE:
            parent = n.getParentNode();
            break;
        default:
            throw new IllegalStateException("Unexpected Node type" + n.getNodeType());
        }
        while (null != parent && parent.getNodeType() != Node.DOCUMENT_NODE) {
            // push on stack
            hierarchy.push(parent);
            // get parent of parent
            parent = parent.getParentNode();
        }
        // construct xpath
        Object obj = null;
        while (!hierarchy.isEmpty() && null != (obj = hierarchy.pop())) {
            Node node = (Node) obj;
            boolean handled = false;
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                Element e = (Element) node;
                // is this the root element?
                if (buffer.length() == 0) {
                    // root element - simply append element name
                    buffer.append(node.getNodeName());
                } else {
                    // child element - append slash and element name
                    buffer.append("/");
                    buffer.append(node.getNodeName());
                    if (node.hasAttributes()) {
                        // see if the element has a name or id attribute
                        if (e.hasAttribute("id")) {
                            // id attribute found - use that
                            buffer.append("[@id='" + e.getAttribute("id") + "']");
                            handled = true;
                        } else if (e.hasAttribute("name")) {
                            // name attribute found - use that
                            buffer.append("[@name='" + e.getAttribute("name") + "']");
                            handled = true;
                        }
                    }
                    if (!handled) {
                        // no known attribute we could use - get sibling index
                        int prev_siblings = 1;
                        Node prev_sibling = node.getPreviousSibling();
                        while (null != prev_sibling) {
                            if (prev_sibling.getNodeType() == node.getNodeType()) {
                                if (prev_sibling.getNodeName().equalsIgnoreCase(node.getNodeName())) {
                                    prev_siblings++;
                                }
                            }
                            prev_sibling = prev_sibling.getPreviousSibling();
                        }
                        buffer.append("[" + prev_siblings + "]");
                    }
                }
            } else if (node.getNodeType() == Node.ATTRIBUTE_NODE) {
                buffer.append("/@");
                buffer.append(node.getNodeName());
            }
        }
        // return buffer
        return buffer.toString();
    }
}
package be.fedict.dcat.scrapers; import be.fedict.dcat.helpers.Cache; import be.fedict.dcat.helpers.Page; import be.fedict.dcat.helpers.Storage; import be.fedict.dcat.vocab.DCAT; import be.fedict.dcat.vocab.MDR_LANG; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.swing.text.html.HTML; import org.jsoup.Jsoup; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; import org.openrdf.model.URI; import org.openrdf.model.vocabulary.DCTERMS; import org.openrdf.model.vocabulary.RDF; import org.openrdf.repository.RepositoryException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Scraper for Oostende DO2 website. * * @author Bart Hanssens <bart.hanssens@fedict.be> */ public class HtmlOostende extends Html { private final Logger logger = LoggerFactory.getLogger(HtmlOostende.class); private final static String CONTENT_ID = "content"; private final static String DIV_DESC = "opendata_long"; private final static String LINK_DATASETS = "ul.dataviews li.item a:has(span)"; private final static String LINK_DISTS = "div.odsub a.file"; private final static String LIST_CATS = "ul.listcategorien li a"; /** * Get the list of all the downloads (DCAT Dataset). * * @return List of URLs * @throws IOException */ private List<URL> scrapeDatasetList() throws IOException { List<URL> urls = new ArrayList<>(); URL base = getBase(); String front = makeRequest(base); Elements links = Jsoup.parse(front).select(LINK_DATASETS); for(Element link : links) { String href = link.attr(HTML.Attribute.HREF.toString()); urls.add(makeAbsURL(href)); } return urls; } /** * Scrape the site. 
* * @throws IOException */ @Override public void scrape() throws IOException { logger.info("Start scraping"); Cache cache = getCache(); List<URL> urls = cache.retrieveURLList(); if (urls.isEmpty()) { urls = scrapeDatasetList(); cache.storeURLList(urls); } logger.info("Found {} datasets on page", String.valueOf(urls.size())); logger.info("Start scraping (waiting between requests)"); int i = 0; for (URL u : urls) { Map<String, Page> page = cache.retrievePage(u); if (page.isEmpty()) { sleep(); if (++i % 100 == 0) { logger.info("Download {}...", Integer.toString(i)); } try { String html = makeRequest(u); cache.storePage(u, "", new Page(u, html)); } catch (IOException ex) { logger.error("Failed to scrape {}", u); } } } logger.info("Done scraping"); } /** * Generate DCAT distribution. * * @param store RDF store * @param dataset URI * @param front URL of the front page * @param link link element * @param i dist sequence * @param lang language code * @throws MalformedURLException * @throws RepositoryException */ private void generateDist(Storage store, URI dataset, URL access, Element link, int i, String lang) throws MalformedURLException, RepositoryException { String href = link.attr(HTML.Attribute.HREF.toString()); URL download = makeAbsURL(href); URL u = makeDistURL(access + "/" + i + "/" + lang); URI dist = store.getURI(u.toString()); logger.debug("Generating distribution {}", dist.toString()); store.add(dataset, DCAT.DISTRIBUTION, dist); store.add(dist, RDF.TYPE, DCAT.A_DISTRIBUTION); store.add(dist, DCTERMS.LANGUAGE, MDR_LANG.MAP.get(lang)); store.add(dist, DCTERMS.TITLE, link.text().trim(), lang); store.add(dist, DCAT.ACCESS_URL, access); store.add(dist, DCAT.DOWNLOAD_URL, download); store.add(dist, DCAT.MEDIA_TYPE, getFileExt(href)); } /** * Generate DCAT Dataset * * @param store RDF store * @param id dataset id * @param page * @throws MalformedURLException * @throws RepositoryException */ @Override protected void generateDataset(Storage store, String id, 
Map<String, Page> page) throws MalformedURLException, RepositoryException { String lang = getDefaultLang(); Page p = page.getOrDefault("", new Page()); String html = p.getContent(); URL u = p.getUrl(); Element content = Jsoup.parse(html).body().getElementById(CONTENT_ID); URI dataset = store.getURI(u.toString()); logger.debug("Generating dataset {}", dataset.toString()); Element h1 = content.getElementsByTag(HTML.Tag.H1.toString()).first(); if (h1 == null) { logger.warn("Empty title, skipping"); return; } String title = h1.text().trim(); Element div = content.getElementsByClass(DIV_DESC).first(); String desc = (div != null) ? div.text() : title; store.add(dataset, RDF.TYPE, DCAT.A_DATASET); store.add(dataset, DCTERMS.LANGUAGE, MDR_LANG.MAP.get(lang)); store.add(dataset, DCTERMS.TITLE, title, lang); store.add(dataset, DCTERMS.DESCRIPTION, desc, lang); store.add(dataset, DCTERMS.IDENTIFIER, makeHashId(u.toString())); store.add(dataset, DCAT.LANDING_PAGE, u); Elements cats = content.select(LIST_CATS); for (Element cat : cats) { store.add(dataset, DCAT.KEYWORD, cat.text(), lang); } int i = 0; Elements dists = content.select(LINK_DISTS); for(Element dist : dists) { generateDist(store, dataset, u, dist, ++i, lang); } } /** * Generate DCAT catalog information. * * @param store * @param catalog * @throws RepositoryException */ @Override public void generateCatalogInfo(Storage store, URI catalog) throws RepositoryException { super.generateCatalogInfo(store, catalog); store.add(catalog, DCTERMS.TITLE, "DCAT export Oostende D02", "en"); store.add(catalog, DCTERMS.LANGUAGE, MDR_LANG.NL); } /** * Generate DCAT. 
* * @param cache * @param store * @throws RepositoryException * @throws MalformedURLException */ @Override public void generateDcat(Cache cache, Storage store) throws RepositoryException, MalformedURLException { logger.info("Generate DCAT"); /* Get the list of all datasets */ List<URL> urls = cache.retrieveURLList(); for(URL u : urls) { Map<String,Page> page = cache.retrievePage(u); String id = makeHashId(u.toString()); generateDataset(store, id, page); } generateCatalog(store); } /** * HTML scraper Oostende DO2. * * @param caching * @param storage * @param base */ public HtmlOostende(File caching, File storage, URL base) { super(caching, storage, base); setName("oostende"); } }
package com.ate.autologin; import java.io.File; import java.util.ArrayList; import java.util.Base64; import java.util.HashMap; import java.util.List; import org.lwjgl.input.Keyboard; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.FontRenderer; import net.minecraft.client.resources.IReloadableResourceManager; import net.minecraft.client.resources.SimpleReloadableResourceManager; import net.minecraft.client.resources.data.MetadataSerializer; import net.minecraft.client.settings.KeyBinding; import net.minecraft.util.ResourceLocation; import net.minecraft.util.text.TextComponentString; import net.minecraftforge.client.MinecraftForgeClient; import net.minecraftforge.client.event.ClientChatReceivedEvent; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.common.config.Configuration; import net.minecraftforge.fml.client.registry.ClientRegistry; import net.minecraftforge.fml.common.FMLCommonHandler; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.EventHandler; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.InputEvent; import net.minecraftforge.fml.common.network.FMLNetworkEvent.ClientConnectedToServerEvent; import net.minecraftforge.fml.common.network.FMLNetworkEvent.ClientDisconnectionFromServerEvent; @Mod(version=ModMain.VERSION,modid=ModMain.MODID,name=ModMain.NAME) public class ModMain { public static final String VERSION="1.1",MODID="autologin",NAME="Auto Login Mod",LittleName="AutoLog"; private IReloadableResourceManager mcResourceManager; private final MetadataSerializer metadataSerializer_ = new MetadataSerializer(); public static HashMap<String, String> passwordList; public static String defaultPassword=""; public 
static Configuration password_config; public static FontRenderer passwordFontRenderer; public static KeyBinding optionGui; @Mod.Instance(MODID) public static ModMain instance; public static boolean isConnect=false; public static boolean isinConnect=false; @SubscribeEvent public void onKeyPressed(InputEvent.KeyInputEvent event){ if(ModMain.optionGui.isPressed())Minecraft.getMinecraft().displayGuiScreen(new GuiOption()); } @SubscribeEvent public void onConnectOnServer(ClientConnectedToServerEvent event){ isConnect=false; } @SubscribeEvent public void onDisconnectOnServer(ClientDisconnectionFromServerEvent event){ isConnect=false; } @SubscribeEvent public void onChatReceive(ClientChatReceivedEvent event){ Minecraft mc=Minecraft.getMinecraft(); if(!Minecraft.getMinecraft().isSingleplayer() && !isinConnect){ if(event.getMessage().getUnformattedText().contains("OkLook")){ if(ModMain.passwordList.containsKey(mc.getCurrentServerData().serverIP)){ mc.thePlayer.sendChatMessage("/register "+ModMain.passwordList.get(mc.getCurrentServerData().serverIP+" "+ModMain.passwordList.get(mc.getCurrentServerData().serverIP))); isConnect=true; } else { mc.displayGuiScreen(new GuiConnection("register",2)); } }else if(event.getMessage().getUnformattedText().contains("LookHere")){ if(ModMain.passwordList.containsKey(mc.getCurrentServerData().serverIP)){ mc.thePlayer.sendChatMessage("/login "+ModMain.passwordList.get(mc.getCurrentServerData().serverIP)); isConnect=true; } else { mc.displayGuiScreen(new GuiConnection("login",1)); } } } } public static void syncPasswordList() { String category=Minecraft.getMinecraft().getSession().getUsername(); passwordList.clear(); String passEncode=Base64.getEncoder().encodeToString(String.format(defaultPassword).getBytes()); password_config.setCategoryComment(category, "Category for the username : "+category); passEncode=password_config.getString("defaultpassword", category, passEncode, ""); defaultPassword=new 
String(Base64.getDecoder().decode(passEncode)); String[] pass = password_config.getStringList("passwords", category, new String[]{}, ""); for (int i = 0; i < pass.length; i++) { String[] valueDecode=new String(Base64.getDecoder().decode(pass[i])).split(","); if(valueDecode.length==2){ passwordList.put(valueDecode[0], valueDecode[1]); } } password_config.save(); } public static void Message(String msg){ Minecraft.getMinecraft().thePlayer.addChatMessage(new TextComponentString("\u00a73[\u00a7a"+LittleName+"\u00a73]\u00a72 "+msg)); } public static void ChangePassword(String server,String newPassword){ passwordList.put(server, newPassword); String[] strs=new String[passwordList.size()];int i=0; for (String name: passwordList.keySet()){ String key =name.toString(); String value = passwordList.get(name).toString(); String valueEncode=Base64.getEncoder().encodeToString(String.format(key+","+value).getBytes()); strs[i]=valueEncode; i++;} password_config.get(Minecraft.getMinecraft().getSession().getUsername(), "passwords", passwordList.get(strs)).set(strs); password_config.save(); } public static void setDefaultPassword(String newPassword){ defaultPassword=newPassword; String valueEncode=Base64.getEncoder().encodeToString(String.format(newPassword).getBytes()); password_config.get(Minecraft.getMinecraft().getSession().getUsername(), "defaultpassword", defaultPassword).set(valueEncode); password_config.save(); } @EventHandler public void preinit(FMLPreInitializationEvent event){ passwordList=new HashMap<String, String>(); password_config=new Configuration(new File(new File(event.getModConfigurationDirectory(),"AutoLogin"), "password.cfg")); syncPasswordList(); ClientRegistry.registerKeyBinding(optionGui = new KeyBinding("key.autologin.loginOption", Keyboard.KEY_L, "key.autologin.categories")); MinecraftForge.EVENT_BUS.register(this); FMLCommonHandler.instance().bus().register(this); FMLCommonHandler.instance().bus().register(instance); } @EventHandler public void 
init(FMLInitializationEvent event){ } @EventHandler public void postinit(FMLPostInitializationEvent event){ Minecraft mc=Minecraft.getMinecraft(); passwordFontRenderer=new FontRenderer(mc.gameSettings, new ResourceLocation("textures/font/password.png"),mc.renderEngine, false); this.mcResourceManager = new SimpleReloadableResourceManager(metadataSerializer_); this.mcResourceManager.registerReloadListener(passwordFontRenderer); } }
package sokoban; import java.util.ArrayList; import org.lwjgl.input.Keyboard; import org.newdawn.slick.BasicGame; import org.newdawn.slick.Color; import org.newdawn.slick.GameContainer; import org.newdawn.slick.Graphics; import org.newdawn.slick.Input; import org.newdawn.slick.SlickException; import sokoban.data.EntryQueue; import sokoban.data.SokobanEntry; import sokoban.reader.Reader; import sokoban.reader.ReaderFile; import sokoban.reader.ReaderStdin; public class Sokoban extends BasicGame { ArrayList<SokobanEntry> entries; EntryQueue queue; float scale; int current; boolean drawList; boolean autoChange; Reader reader; int markingDrawFlags; private static final int listCount = 40; public Sokoban(String title, boolean fileReader, String filename) { super(title); scale = 20.0f; entries = new ArrayList<SokobanEntry>(); queue = new EntryQueue(); current = -1; drawList = true; autoChange = true; markingDrawFlags = 0xffffffff; if(fileReader) reader = new ReaderFile(queue, filename); else reader = new ReaderStdin(queue); } @Override public void init(GameContainer container) throws SlickException { new Thread(reader).start(); container.getInput().enableKeyRepeat(); } @Override public void render(GameContainer container, Graphics graphics) throws SlickException { if(current >= 0 && current < entries.size()) { SokobanEntry entry = entries.get(current); graphics.scale(scale, scale); entry.render(graphics, markingDrawFlags); graphics.resetTransform(); graphics.setColor(Color.white); graphics.drawString("Id: " + entry.getName() + " Path length: " + entry.getPathLength(), 10, container.getHeight() - 77); } try { Thread.sleep(5); } catch (InterruptedException ie) { } graphics.scale(1.0f, 1.0f); graphics.setColor(Color.white); graphics.drawString("Active markings: ", 10, container.getHeight() - 58); for(int i = 1; i <= 10; ++i) { graphics.setColor(((markingDrawFlags & (1 << (i % 10))) != 0) ? 
Color.white : Color.darkGray); graphics.drawString("" + (i % 10), 140 + i * 20, container.getHeight() - 58); } graphics.setColor(Color.white); graphics.drawString("Zoom: +/- Toggle list: L Clear data: C Auto change(" + (autoChange ? "on" : "off") + "): A", 10, container.getHeight() - 39); graphics.drawString("Change state: 1: Up/Down 5: Shift + Up/Down One page: Left/Right First/last: Shift + Left/Right", 10, container.getHeight() - 20); if(reader.hasError()) { graphics.setColor(Color.orange); graphics.drawString("Input error! " + reader.getError(), 10, container.getHeight() - 100); } if(drawList) { int low = current - listCount/2; int high = current + listCount; if(low < 0) { high -= low; } if(high > entries.size()) { low -= (high - entries.size()); } if(low < 0) low = 0; if(high > entries.size()) high = entries.size(); graphics.resetTransform(); for(int i = low; i < high; ++i) { graphics.setColor(i == current ? Color.red : Color.white); SokobanEntry entry = entries.get(i); String s = i + ": "; int k = Math.max(2 - (int)Math.log10(i==0?1:i), 0); for(int j = 0; j < k; ++j) s += " "; s += entry.getName() + " Path: " + entry.getPathLength(); graphics.drawString(s, container.getWidth() - 300, 7 + (i - low) * 15); } } } @Override public void update(GameContainer container, int dt) throws SlickException { SokobanEntry entry = queue.pop(); if(entry != null) { entries.add(entry); if(autoChange) current = entries.size() - 1; } try { Thread.sleep(5); } catch (InterruptedException ie) { } float d = (float)dt/1000.0f; Input input = container.getInput(); if(input.isKeyDown(Keyboard.KEY_MINUS) || input.isKeyDown(Keyboard.KEY_SUBTRACT) || input.isKeyDown(Keyboard.KEY_COMMA)) scale -= (scale * d); if(input.isKeyDown(Keyboard.KEY_ADD) || input.isKeyDown(13) || input.isKeyDown(Keyboard.KEY_PERIOD)) scale += (scale * d); if(input.isKeyPressed(Keyboard.KEY_UP)) { if(input.isKeyDown(Keyboard.KEY_RSHIFT) || input.isKeyDown(Keyboard.KEY_LSHIFT) ) current -= 5; else --current; if(current 
< 0) current = 0; } if (input.isKeyPressed(Keyboard.KEY_LEFT)) { if(input.isKeyDown(Keyboard.KEY_RSHIFT) || input.isKeyDown(Keyboard.KEY_LSHIFT) ) current = 0; else current -= listCount; if(current < 0) current = 0; } if(input.isKeyPressed(Keyboard.KEY_DOWN)) { if(input.isKeyDown(Keyboard.KEY_RSHIFT) || input.isKeyDown(Keyboard.KEY_LSHIFT) ) current += 5; else ++current; if(current >= entries.size()) current = entries.size() - 1; } if (input.isKeyPressed(Keyboard.KEY_RIGHT)) { if(input.isKeyDown(Keyboard.KEY_RSHIFT) || input.isKeyDown(Keyboard.KEY_LSHIFT) ) current = entries.size() - 1; else current += listCount; if(current >= entries.size()) current = entries.size() - 1; } if(input.isKeyPressed(Keyboard.KEY_L)) { drawList = !drawList; } if(input.isKeyPressed(Keyboard.KEY_A)) { autoChange = !autoChange; } if(input.isKeyPressed(Keyboard.KEY_C)) { entries.clear(); reader.onClear(); } for(int i = 1; i <= 10; ++i) { if(input.isKeyPressed(i + 1)) { if((markingDrawFlags & (1 << (i%10))) == 0) markingDrawFlags |= (1 << (i%10)); else markingDrawFlags &= ~(1 << (i%10)); } } } public void cleanUp() { } }
package com.codahale.metrics; import java.util.ArrayList; import java.util.List; import java.util.Map.Entry; public class JSON { private final List<Entry<String, Object>> entries; public void add(final String key, final Object value) { entries.add(new Entry<String, Object>() { @Override public String setValue(final Object value) { return null; } @Override public Object getValue() { return value; } @Override public String getKey() { return key; } }); } private final String indent(final int size) { String indent = ""; for (int i = 1; i <= size; i++) { indent += " "; } return indent; } public String render() { return render(0); } public String render(final int indentSize) { final String indent = indent(indentSize); String res = indent + "{\n"; int count = 0; for (final Entry<String, Object> e : entries) { count += 1; if (e.getValue() instanceof String) { res += indent + " '" + e.getKey() + "': '" + e.getValue(); } else if (e.getValue() instanceof JSON) { final JSON json = (JSON) e.getValue(); res += " '" + e.getKey() + "': \n"; res += json.render(indentSize + 4); continue; } else { res += indent + " '" + e.getKey() + "': '" + e.getValue().toString(); } if (count != entries.size()) { res += ",\n"; } else { res += "\n"; } } res += "}"; return res; } public JSON() { super(); this.entries = new ArrayList<Entry<String, Object>>(); } }
package interdroid.util.view; import interdroid.util.R; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import android.content.Context; import android.graphics.Bitmap; import android.graphics.PixelFormat; import android.util.AttributeSet; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.widget.HeaderViewListAdapter; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.ListAdapter; import android.widget.ListView; /** * A DraggableListView which can handle drag and drop operations. * * @author nick <palmer@cs.vu.nl> * */ public class DraggableListView extends ListView { /** Logger access */ private static final Logger logger = LoggerFactory .getLogger(DraggableListView.class); /** Are we currently dragging? */ private boolean mDragMode; /** Are we currently removing? */ private boolean mRemoving; /** Should the dragged item be allowed to move left and right? */ private boolean mAllowLeftRightMovement = false; /** Should we allow items to be added? */ private boolean mAllowAdd = true; /** The resource id for the add button */ private int mAddResource = R.layout.draggable_add; /** The starting position for a drag */ int mStartPosition; /** The ending position for a drag */ int mEndPosition; /** The offset for the drag */ int mDragOffset; /* The top of the removed view */ int mRemoveTop; /* The bottom of the removed view */ int mRemoveBottom; /* The view being dragged */ ImageView mDragView; /* The listener we notify when we are adding an item */ private AddListener mAddListener; /** * List views do not properly measure their height. * We thus implement it correctly to gets around the problem. 
*/ protected void onMeasure (int widthMeasureSpec, int heightMeasureSpec) { // Let our parent figure it out most measurements for us super.onMeasure( widthMeasureSpec, heightMeasureSpec ); logger.debug("onMeasure "+this+ ": width: "+decodeMeasureSpec( widthMeasureSpec )+ "; height: "+decodeMeasureSpec( heightMeasureSpec )+ "; measuredHeight: "+getMeasuredHeight()+ "; measuredWidth: "+getMeasuredWidth() ); int height = 0; // getMeasuredHeight(); // logger.debug("Header height is: {}", height); ListAdapter adapter = getAdapter(); int count = adapter.getCount(); for (int i = 0; i < count; i++) { View child = adapter.getView(i, null, null); child.measure(widthMeasureSpec, heightMeasureSpec); height += child.getMeasuredHeight(); } logger.debug("Setting measured dimension to: {}x{}", getMeasuredWidth(), height); setMeasuredDimension( getMeasuredWidth(), height ); } /** * A helper so we can log measure specs easily * @param measureSpec the measure spect to decode * @return A string representation */ private String decodeMeasureSpec( int measureSpec ) { int mode = View.MeasureSpec.getMode( measureSpec ); String modeString = "<> "; switch( mode ) { case View.MeasureSpec.UNSPECIFIED: modeString = "UNSPECIFIED "; break; case View.MeasureSpec.EXACTLY: modeString = "EXACTLY "; break; case View.MeasureSpec.AT_MOST: modeString = "AT_MOST "; break; } return modeString+Integer.toString( View.MeasureSpec.getSize( measureSpec ) ); } /** Our listener for drop operations */ private DropListener mInnerDropListener = new DropListener() { public void onDrop(int from, int to) { ListAdapter adapter = getAdapter(); if (mAllowAdd) { if (from > 0) from -= 1; if (to > 0) to -= 1; adapter = ((HeaderViewListAdapter) adapter).getWrappedAdapter(); } logger.debug("Adapter: {}", adapter); if (adapter instanceof DraggableAdapter) { logger.debug("Firing onDrop: {} {}", from, to); ((DraggableAdapter)adapter).onDrop(from, to); invalidateViews(); } else { logger.debug("Not a draggable adapter."); } } }; 
/** Our handler for remove actions */ private RemoveListener mInnerRemoveListener = new RemoveListener() { public void onRemove(int which) { ListAdapter adapter = getAdapter(); if (mAllowAdd) { if (which > 0) which -= 1; adapter = ((HeaderViewListAdapter) adapter).getWrappedAdapter(); } if (adapter instanceof DraggableAdapter) { logger.debug("Firing onRemove: {}", which); ((DraggableAdapter)adapter).onRemove(which); invalidateViews(); } else { logger.debug("Not a removable adapter."); } } }; /** Our handler for drag actions */ private DragListener mInnerDragListener = new DragListener() { // TODO: This should come from style or something. int backgroundColor = 0xe0103010; int defaultBackgroundColor; public void onDragStart(View itemView) { itemView.setVisibility(View.INVISIBLE); defaultBackgroundColor = itemView.getDrawingCacheBackgroundColor(); itemView.setBackgroundColor(backgroundColor); ImageView iv = (ImageView)itemView.findViewById(R.id.drag_handle); if (iv != null) iv.setVisibility(View.INVISIBLE); } public void onDragStop(View itemView) { itemView.setVisibility(View.VISIBLE); itemView.setBackgroundColor(defaultBackgroundColor); ImageView iv = (ImageView)itemView.findViewById(R.id.drag_handle); if (iv != null) iv.setVisibility(View.VISIBLE); } }; /** * Construct the list view. Called by the android inflate system * @param context the context the view will run in * @param attrs the attributes we will take on */ public DraggableListView(Context context, AttributeSet attrs) { super(context, attrs); } /** * Construct the list view. Called by the android inflate system * @param context the context the view will run in */ public DraggableListView(Context context) { super(context); } /** * Toggle if views should be allowed to move left and right while dragging * @param b true if views should be able to move left and right */ public void setAllowLeftRightMovement(boolean b) { mAllowLeftRightMovement = b; } /** * Sets the resource to inflate for the add button. 
Note that * the resource must include a Button with id interdroid.util.R.add_button * @param resc the resource to inflate for the add button */ public void setAddResource(int resc) { mAddResource = resc; } /** * Sets if we should allow items to be added * @param b true if allow buttons should be shown */ public void setAllowAdd(boolean b) { if(getAdapter() != null) { throw new IllegalStateException( "You must set allow before setting the adapter."); } mAllowAdd = b; } /** * The listener we will notify when an add button is clicked. * @param l the listener to notify */ public void setAddListener(AddListener l) { mAllowAdd = true; mAddListener = l; } /** * Sets the adapter this view will use to construct views. The adapter * must be an instance of DraggableAdapter or an exception will be thrown. * @param adpater the adapter for the list views */ @Override public void setAdapter(ListAdapter adapter) { if (!(adapter instanceof DraggableAdapter)) { throw new IllegalArgumentException("Adapter for a DraggableListView must be a DraggableAdapter"); } if (mAllowAdd) { View header = inflate(getContext(), mAddResource, null); ImageButton addButton = (ImageButton) header.findViewById(R.id.add_button); addButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { logger.debug("Add button clicked."); if (mAddListener != null) { logger.debug("Firing add event."); mAddListener.onAddItem(); } } }); addHeaderView(header); View footer = inflate(getContext(), mAddResource, null); ImageButton footerAddButton = (ImageButton) footer.findViewById(R.id.add_button); footerAddButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { logger.debug("Add button clicked."); if (mAddListener != null) { logger.debug("Firing add event."); mAddListener.onAddItem(); } } }); addFooterView(footer); } super.setAdapter(adapter); } /** * Handles drag touch events. 
* @param ev the touch events while dragging */ @Override public boolean onTouchEvent(MotionEvent ev) { final int action = ev.getAction(); final int x = (int) ev.getX(); final int y = (int) ev.getY(); // How wide is our drag target? int touched = pointToPosition(x, y); int minX = 0; int maxX = 0; // Break out if they touched the add view if (!mDragMode && mAllowAdd && touched == 0) { return false; } if (touched != INVALID_POSITION && touched != 0) { View tView = getChildAt(touched); if (tView != null) { tView = tView.findViewById(R.id.drag_handle); if (tView == null) { return false; } minX = tView.getLeft(); maxX = tView.getRight(); } } if (!mRemoving && action == MotionEvent.ACTION_DOWN && x >= minX && x <= maxX) { mDragMode = true; } if (!mDragMode) { // Check if we are pressing the remove button if (touched != INVALID_POSITION) { ImageView button = (ImageView) getChildAt(touched).findViewById(R.id.remove_button); switch (action) { case MotionEvent.ACTION_UP: logger.debug("Releasing: {} {}", x, y); logger.debug("{} {}", button.getLeft(), button.getRight()); logger.debug("{} {}", mRemoveTop, mRemoveBottom); if (mRemoving && x >= button.getLeft() && x <= button.getRight() && y >= mRemoveTop && y <= mRemoveBottom) { logger.debug("Remove button pressed."); if (mInnerRemoveListener != null) { mInnerRemoveListener.onRemove(touched); } } button.setImageResource(R.drawable.remove_button); button.postInvalidate(); mRemoving = false; break; case MotionEvent.ACTION_MOVE: if (mRemoving) { logger.debug("Remove button moved: {} {}", x, y); logger.debug("{} {}", button.getLeft(), button.getRight()); logger.debug("{} {}", mRemoveTop, mRemoveBottom); if (x >= button.getLeft() && x <= button.getRight() && y >= mRemoveTop && y <= mRemoveBottom) { logger.debug("Showing as pressed."); button.setImageResource(R.drawable.remove_button_pressed); button.postInvalidate(); } else { button.setImageResource(R.drawable.remove_button); button.postInvalidate(); } } break; case 
MotionEvent.ACTION_DOWN: if (!mRemoving && x >= button.getLeft() && x <= button.getRight()) { mRemoving = true; mRemoveTop = getChildAt(touched).getTop(); mRemoveBottom = getChildAt(touched).getBottom(); logger.debug("Remove button pressed: {} {}", mRemoveTop, mRemoveBottom); button.setImageResource(R.drawable.remove_button_pressed); button.postInvalidate(); } break; } } } else { switch (action) { case MotionEvent.ACTION_DOWN: { mStartPosition = touched; int mItemPosition = mStartPosition - getFirstVisiblePosition(); logger.debug("Drag: {}", mItemPosition); if (mStartPosition != INVALID_POSITION) { mDragOffset = y - getChildAt(mItemPosition).getTop(); mDragOffset -= ((int)ev.getRawY()) - y; startDrag(mItemPosition,y); logger.debug("Drag Start: {} {} :" + y, getTop(), getBottom()); drag(mAllowLeftRightMovement ? x : 0,y); // Now we need to try to turn off interception requestDisallowInterceptRecursive(getRootView(), true); } } break; case MotionEvent.ACTION_MOVE: { logger.debug("Drag: {} {} :", y, getBottom() - getTop()); if ( y >= 0 && y <= getBottom() - getTop()) drag(mAllowLeftRightMovement ? 
x : 0, y); } break; case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: default: { mDragMode = false; mEndPosition = touched; logger.debug("Checking end: {} {}", mEndPosition, getCount() - 1); if (mEndPosition == getCount() - 1) { View child = getChildAt(mEndPosition); int top = y - (mDragView.getHeight() / 2); logger.debug("Checking top: {} {}", top, child.getTop()); if (top > child.getTop()) { logger.debug("After end."); mEndPosition += 1; } } logger.debug("Dropped: {} {}", mStartPosition, mEndPosition); stopDrag(mStartPosition - getFirstVisiblePosition()); if (mStartPosition != INVALID_POSITION && mEndPosition != INVALID_POSITION && mStartPosition != mEndPosition) mInnerDropListener.onDrop(mStartPosition, mEndPosition); // Now we need to try to turn on interception again requestDisallowInterceptRecursive(getRootView(), false); } break; } } return true; } /** * Hack to disallow intercepts of touch events on all sub views so * that we can drag properly. This is required because PhoneDecore * doesn't pass the request to children properly. * @param root the root view * @param disallow true if we should disallow intercepts */ private void requestDisallowInterceptRecursive(View root, boolean disallow) { if (root instanceof ViewGroup) { ViewGroup rootGroup = (ViewGroup)root; rootGroup.requestDisallowInterceptTouchEvent(disallow); for (int i = 0; i < rootGroup.getChildCount(); i++) { requestDisallowInterceptRecursive(rootGroup.getChildAt(i), disallow); } } } /** * Updates the position of the dragged view. 
* @param x the x position for the view * @param y the y position for the view */ private void drag(int x, int y) { if (mDragView != null) { WindowManager.LayoutParams layoutParams = (WindowManager.LayoutParams) mDragView.getLayoutParams(); layoutParams.x = x; layoutParams.y = y - mDragOffset; WindowManager mWindowManager = (WindowManager) getContext() .getSystemService(Context.WINDOW_SERVICE); mWindowManager.updateViewLayout(mDragView, layoutParams); } } /** * Starts a drag operation on the given item * @param itemIndex the index of the dragged item * @param y the y offset of the touch which started the drag */ private void startDrag(int itemIndex, int y) { stopDrag(itemIndex); View item = getChildAt(itemIndex); if (item == null) return; item.setDrawingCacheEnabled(true); mInnerDragListener.onDragStart(item); // Create a copy of the drawing cache so that it does not get recycled // by the framework when the list tries to clean up memory Bitmap bitmap = Bitmap.createBitmap(item.getDrawingCache()); WindowManager.LayoutParams mWindowParams = new WindowManager.LayoutParams(); mWindowParams.gravity = Gravity.TOP; mWindowParams.x = 0; mWindowParams.y = y - mDragOffset; mWindowParams.height = WindowManager.LayoutParams.WRAP_CONTENT; mWindowParams.width = WindowManager.LayoutParams.WRAP_CONTENT; mWindowParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON | WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS; mWindowParams.format = PixelFormat.TRANSLUCENT; mWindowParams.windowAnimations = 0; Context context = getContext(); ImageView v = new ImageView(context); v.setImageBitmap(bitmap); WindowManager mWindowManager = (WindowManager)context.getSystemService(Context.WINDOW_SERVICE); mWindowManager.addView(v, mWindowParams); mDragView = v; } /** * Handles stopping a drag of the given item * @param itemIndex the index of the 
item which is being dragged */ private void stopDrag(int itemIndex) { if (mDragView != null) { mInnerDragListener.onDragStop(getChildAt(itemIndex)); mDragView.setVisibility(GONE); WindowManager wm = (WindowManager)getContext().getSystemService(Context.WINDOW_SERVICE); wm.removeView(mDragView); mDragView.setImageDrawable(null); mDragView = null; } } }
package com.esindexer;

import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

import org.apache.log4j.Logger;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;

import com.esindexer.preferences.IPreferences;
import com.esindexer.xstream.model.ProcessedIndex;
import com.esindexer.xstream.model.ProcessedPage;

/**
 * Watches the configured index file for filesystem changes and pushes new or
 * updated pages into ElasticSearch.
 *
 * @author Roland Quast (roland@formreturn.com)
 */
class FileWatcher implements Runnable {

    private static Logger LOG = Logger.getLogger(FileWatcher.class);

    private WatchService myWatcher;
    private ProcessedIndex index;         // state of already-indexed pages
    private IPreferences preferences;     // persisted after each successful index update
    private ConfigJson configJson;        // ES node addresses and target index name

    public FileWatcher(WatchService myWatcher) {
        this.myWatcher = myWatcher;
    }

    /**
     * Blocks on the watch service, reprocessing the index file each time a
     * matching change event arrives. Exits when interrupted or when the watch
     * key becomes invalid.
     */
    @Override
    public void run() {
        try {
            WatchKey key = myWatcher.take();
            while (key != null) {
                for (WatchEvent<?> event : key.pollEvents()) {
                    final Path changed = (Path) event.context();
                    LOG.debug("File updated: " + changed);
                    // Only react to changes of the file we are indexing.
                    if (!(index.getPath().endsWith(changed.toString()))) {
                        continue;
                    }
                    try {
                        processFile();
                    } catch (IOException | ParseException | java.text.ParseException e) {
                        // FileNotFoundException is covered by IOException.
                        LOG.error(e, e);
                    }
                }
                // reset() returns false when the key is no longer valid
                // (e.g. the watched directory was deleted) — stop watching then.
                if (!key.reset()) {
                    LOG.info("Watch key no longer valid; stopping file watcher.");
                    break;
                }
                key = myWatcher.take();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
            LOG.info(e, e);
        }
    }

    /**
     * Parses the JSON index file and (re)indexes every page that is new or has
     * a newer "modified" timestamp than the previously processed version.
     *
     * @throws IOException              if the index file cannot be read
     * @throws ParseException           if the file is not valid JSON
     * @throws java.text.ParseException if a "modified" timestamp is malformed
     */
    private void processFile() throws FileNotFoundException, IOException,
            ParseException, java.text.ParseException {
        TransportClient client = new TransportClient();
        try {
            for (String node : this.configJson.getNodes()) {
                client.addTransportAddress(new InetSocketTransportAddress(node, 9300));
            }

            JSONParser parser = new JSONParser();
            Object obj;
            // try-with-resources: the reader was previously leaked.
            try (FileReader reader = new FileReader(index.getPath())) {
                obj = parser.parse(reader);
            }
            JSONArray pageList = (JSONArray) obj;

            // Hoisted out of the loop: the format is loop-invariant.
            DateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z", Locale.ENGLISH);
            format.setTimeZone(TimeZone.getTimeZone("UTC"));

            for (Object pageObj : pageList.toArray()) {
                JSONObject pageJObj = (JSONObject) pageObj;
                String modifiedStr = ((String) pageJObj.get("modified")).trim();
                String url = ((String) pageJObj.get("url")).trim();
                String title = ((String) pageJObj.get("title")).trim();
                String content = (String) pageJObj.get("content");
                String path = ((String) pageJObj.get("path")).trim();
                String categoriesStr = (String) pageJObj.get("categories");
                String tag = ((String) pageJObj.get("tag")).trim();
                String type = ((String) pageJObj.get("type")).trim();

                Date newModified = format.parse(modifiedStr);

                // Skip pages we have already indexed at this (or a later) timestamp.
                ProcessedPage existing = index.getProcessedPages().get(url);
                if (existing != null && !newModified.after(existing.getModified())) {
                    continue;
                }

                ProcessedPage processedPage =
                        buildPage(url, newModified, title, content, path, categoriesStr, tag, type);
                if (updateIndex(client, processedPage)) {
                    index.getProcessedPages().put(url, processedPage);
                    preferences.save();
                }
            }
        } finally {
            // Always release the transport client, even when parsing fails.
            client.close();
        }
    }

    /** Assembles a ProcessedPage record from the parsed JSON fields. */
    private ProcessedPage buildPage(String url, Date modified, String title, String content,
                                    String path, String categoriesStr, String tag, String type) {
        ProcessedPage processedPage = new ProcessedPage();
        processedPage.setUrl(url);
        processedPage.setModified(modified);
        processedPage.setTitle(title);
        processedPage.setContent(content);
        processedPage.setPath(path);
        processedPage.setType(type);
        for (String category : categoriesStr.split(",")) {
            processedPage.getCategories().add(category.trim());
        }
        processedPage.getTags().add(tag);
        return processedPage;
    }

    /**
     * Sends one page document to ElasticSearch.
     *
     * @return true if ES acknowledged the create/update, false on any failure
     */
    @SuppressWarnings("unchecked") // json-simple's JSONObject is a raw HashMap
    private boolean updateIndex(Client client, ProcessedPage processedPage) {
        JSONObject obj = new JSONObject();
        obj.put("url", processedPage.getUrl());
        obj.put("title", processedPage.getTitle());
        obj.put("content", processedPage.getContent());
        obj.put("modified", "\"" + processedPage.getModified() + "\"");
        String json = null;
        try {
            json = obj.toJSONString();
        } catch (Exception ex) {
            LOG.error(ex, ex);
        }
        if (json == null) {
            return false;
        }
        LOG.debug(json);
        IndexResponse response = null;
        try {
            response = client.prepareIndex(configJson.getIndex(),
                    processedPage.getType(), processedPage.getUrl())
                    .setSource(json)
                    .execute()
                    .actionGet();
        } catch (Exception ex) {
            LOG.error(ex, ex);
            return false;
        }
        if (response != null) {
            if (response.isCreated()) {
                LOG.info("ElasticSearch response was \"created\".");
            } else {
                LOG.info("ElasticSearch response was \"updated\"");
            }
            return true;
        } else {
            LOG.error("No response object created.");
            return false;
        }
    }

    public void setProcessedIndex(ProcessedIndex index) {
        this.index = index;
    }

    public void setPreferences(IPreferences preferences) {
        this.preferences = preferences;
    }

    public void setConfigJson(ConfigJson configJson) {
        this.configJson = configJson;
    }
}
package io.flutter.inspector;

import com.google.gson.JsonObject;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.util.Alarm;
import com.intellij.xdebugger.XSourcePosition;
import io.flutter.utils.StreamSubscription;
import io.flutter.vmService.DartVmServiceDebugProcess;
import io.flutter.vmService.VMServiceManager;
import org.dartlang.vm.service.VmService;
import org.dartlang.vm.service.consumer.EvaluateConsumer;
import org.dartlang.vm.service.consumer.GetIsolateConsumer;
import org.dartlang.vm.service.consumer.GetObjectConsumer;
import org.dartlang.vm.service.element.*;
import org.jetbrains.annotations.NotNull;

import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

/**
 * Invoke methods from a specified Dart library using the observatory protocol.
 */
public class EvalOnDartLibrary implements Disposable {
  private static final Logger LOG = Logger.getInstance(EvalOnDartLibrary.class);

  // Subscription to flutter-isolate changes; cancelled in dispose().
  private final StreamSubscription<IsolateRef> subscription;
  // Id of the current flutter isolate; (re)assigned by initialize().
  private String isolateId;
  private final VmService vmService;
  @SuppressWarnings("FieldCanBeLocal")
  private final VMServiceManager vmServiceManager;
  // Candidate library URIs; the first one found in the isolate is used.
  private final Set<String> libraryNames;
  // Completes with the matching LibraryRef once the isolate is inspected;
  // reset to a fresh future whenever the flutter isolate changes.
  CompletableFuture<LibraryRef> libraryRef;
  private final Alarm myRequestsScheduler;

  /**
   * For robustness we ensure at most one pending request is issued at a time.
   */
  private CompletableFuture<?> allPendingRequestsDone;
  // Guards reads/writes of allPendingRequestsDone.
  private final Object pendingRequestLock = new Object();

  /**
   * Public so that other related classes such as InspectorService can ensure their
   * requests are in a consistent order with requests which eliminates otherwise
   * surprising timing bugs such as if a request to dispose an
   * InspectorService.ObjectGroup was issued after a request to read properties
   * from an object in a group but the request to dispose the object group
   * occurred first.
   * <p>
   * The design is we have at most 1 pending request at a time. This sacrifices
   * some throughput with the advantage of predictable semantics and the benefit
   * that we are able to skip large numbers of requests if they happen to be
   * from groups of objects that should no longer be kept alive.
   * <p>
   * The optional ObjectGroup specified by isAlive, indicates whether the
   * request is still relevant or should be cancelled. This is an optimization
   * for the Inspector to avoid overloading the service with stale requests if
   * the user is quickly navigating through the UI generating lots of stale
   * requests to view specific details subtrees.
   */
  public <T> CompletableFuture<T> addRequest(InspectorService.ObjectGroup isAlive, Supplier<CompletableFuture<T>> request) {
    // Already-disposed group: the answer is no longer wanted; short-circuit.
    if (isAlive != null && isAlive.isDisposed()) {
      return CompletableFuture.completedFuture(null);
    }

    if (myRequestsScheduler.isDisposed()) {
      return CompletableFuture.completedFuture(null);
    }

    // Future that completes when the request has finished.
    final CompletableFuture<T> response = new CompletableFuture<>();
    // This is an optimization to avoid sending stale requests across the wire.
    final Runnable wrappedRequest = () -> {
      // Re-check liveness at execution time: the group may have been disposed
      // while this request sat in the queue.
      if (isAlive != null && isAlive.isDisposed()) {
        response.complete(null);
        return;
      }
      final CompletableFuture<T> future = request.get();
      future.whenCompleteAsync((v, t) -> {
        if (t != null) {
          response.completeExceptionally(t);
        } else {
          response.complete(v);
        }
      });
    };
    synchronized (pendingRequestLock) {
      if (allPendingRequestsDone == null || allPendingRequestsDone.isDone()) {
        // No request in flight: run immediately.
        allPendingRequestsDone = response;
        myRequestsScheduler.addRequest(wrappedRequest, 0);
      } else {
        final CompletableFuture<?> previousDone = allPendingRequestsDone;
        allPendingRequestsDone = response;
        // Actually schedule this request only after the previous request completes.
        previousDone.whenCompleteAsync((v, error) -> {
          if (myRequestsScheduler.isDisposed()) {
            response.complete(null);
          } else {
            myRequestsScheduler.addRequest(wrappedRequest, 0);
          }
        });
      }
    }
    return response;
  }

  /**
   * @param libraryNames candidate Dart library URIs; the first match wins
   * @param vmService observatory-protocol connection used for all requests
   */
  public EvalOnDartLibrary(Set<String> libraryNames, VmService vmService, VMServiceManager vmServiceManager) {
    this.libraryNames = libraryNames;
    this.vmService = vmService;
    this.vmServiceManager = vmServiceManager;
    this.myRequestsScheduler = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, this);

    libraryRef = new CompletableFuture<>();
    // Track the current flutter isolate; re-resolve the library whenever it changes.
    subscription = vmServiceManager.getCurrentFlutterIsolate((isolate) -> {
      if (libraryRef.isDone()) {
        libraryRef = new CompletableFuture<>();
      }
      if (isolate != null) {
        initialize(isolate.getId());
      }
    }, true);
  }

  public String getIsolateId() {
    return isolateId;
  }

  public void dispose() {
    subscription.dispose();
    // TODO(jacobr): complete all pending futures as cancelled?
  }

  /**
   * Evaluates a Dart expression in the resolved library's context.
   *
   * @param expression Dart source to evaluate
   * @param scope optional variable bindings visible to the expression
   * @param isAlive optional group used to cancel the request if stale
   */
  public CompletableFuture<InstanceRef> eval(String expression, Map<String, String> scope, InspectorService.ObjectGroup isAlive) {
    return addRequest(isAlive, () -> {
      final CompletableFuture<InstanceRef> future = new CompletableFuture<>();
      // Wait for the library to be resolved before issuing the evaluate call.
      libraryRef.thenAcceptAsync((LibraryRef ref) -> vmService.evaluate(
        getIsolateId(),
        ref.getId(),
        expression,
        scope,
        true,
        new EvaluateConsumer() {
          @Override
          public void onError(RPCError error) {
            future.completeExceptionally(
              new EvalException(expression, Integer.toString(error.getCode()), error.getMessage()));
          }

          @Override
          public void received(ErrorRef response) {
            future.completeExceptionally(
              new EvalException(expression, response.getKind().name(), response.getMessage()));
          }

          @Override
          public void received(InstanceRef response) {
            future.complete(response);
          }

          @Override
          public void received(Sentinel response) {
            // Sentinel means the target was collected/expired on the VM side.
            future.completeExceptionally(
              new EvalException(expression, "Sentinel", response.getValueAsString()));
          }
        }
      ));
      return future;
    });
  }

  /**
   * Fetches the full object behind a reference; the unchecked cast relies on
   * the caller requesting a T matching the actual Obj subtype returned.
   */
  @SuppressWarnings("unchecked")
  public <T extends Obj> CompletableFuture<T> getObjHelper(ObjRef instance, InspectorService.ObjectGroup isAlive) {
    return addRequest(isAlive, () -> {
      final CompletableFuture<T> future = new CompletableFuture<>();
      vmService.getObject(
        getIsolateId(),
        instance.getId(),
        new GetObjectConsumer() {
          @Override
          public void onError(RPCError error) {
            future.completeExceptionally(new RuntimeException(error.toString()));
          }

          @Override
          public void received(Obj response) {
            future.complete((T)response);
          }

          @Override
          public void received(Sentinel response) {
            future.completeExceptionally(new RuntimeException(response.toString()));
          }
        }
      );
      return future;
    });
  }

  /** Resolves a script token position through the serialized request queue. */
  @NotNull
  public CompletableFuture<XSourcePosition> getSourcePosition(DartVmServiceDebugProcess debugProcess,
                                                              ScriptRef script,
                                                              int tokenPos,
                                                              InspectorService.ObjectGroup isAlive) {
    return addRequest(isAlive, () ->
      CompletableFuture.completedFuture(debugProcess.getSourcePosition(isolateId, script, tokenPos)));
  }

  public CompletableFuture<Instance> getInstance(InstanceRef instance, InspectorService.ObjectGroup isAlive) {
    return getObjHelper(instance, isAlive);
  }

  public CompletableFuture<Library> getLibrary(LibraryRef instance, InspectorService.ObjectGroup isAlive) {
    return getObjHelper(instance, isAlive);
  }

  public CompletableFuture<ClassObj> getClass(ClassRef instance, InspectorService.ObjectGroup isAlive) {
    return getObjHelper(instance, isAlive);
  }

  public CompletableFuture<Func> getFunc(FuncRef instance, InspectorService.ObjectGroup isAlive) {
    return getObjHelper(instance, isAlive);
  }

  /** Convenience overload: chains instance resolution off a pending ref. */
  public CompletableFuture<Instance> getInstance(CompletableFuture<InstanceRef> instanceFuture, InspectorService.ObjectGroup isAlive) {
    return instanceFuture.thenComposeAsync((instance) -> getInstance(instance, isAlive));
  }

  private JsonObject convertMapToJsonObject(Map<String, String> map) {
    final JsonObject obj = new JsonObject();
    for (String key : map.keySet()) {
      obj.addProperty(key, map.get(key));
    }
    return obj;
  }

  /**
   * Scans the isolate's libraries for the first URI in libraryNames and
   * completes libraryRef with it (or exceptionally when nothing matches).
   */
  private void initialize(String isolateId) {
    this.isolateId = isolateId;

    vmService.getIsolate(isolateId, new GetIsolateConsumer() {
      @Override
      public void received(Isolate response) {
        for (LibraryRef library : response.getLibraries()) {
          if (libraryNames.contains(library.getUri())) {
            libraryRef.complete(library);
            return;
          }
        }
        libraryRef.completeExceptionally(new RuntimeException("No library matching " + libraryNames + " found."));
      }

      @Override
      public void received(Sentinel response) {
        libraryRef.completeExceptionally(new RuntimeException(response.toString()));
      }

      @Override
      public void onError(RPCError error) {
        libraryRef.completeExceptionally(new RuntimeException(error.toString()));
      }
    });
  }
}
package com.iyzipay;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Generates the iyzipay request authorization hash:
 * Base64(SHA-1(apiKey + randomString + secretKey + request)).
 */
public final class HashGenerator {

    private HashGenerator() {
    }

    /**
     * Computes the authorization hash for a request.
     *
     * @param apiKey       merchant API key
     * @param secretKey    merchant secret key
     * @param randomString per-request random string
     * @param request      request object; its toString() is appended to the hash input
     * @return Base64-encoded SHA-1 digest of the concatenated input
     * @throws IllegalStateException if SHA-1 is unavailable (never on a compliant JRE)
     */
    public static String generateHash(String apiKey, String secretKey, String randomString, Object request) {
        // Concatenation order is fixed by the API contract:
        // apiKey + randomString + secretKey + request.
        String input = apiKey + randomString + secretKey + request;
        try {
            // "SHA-1" is the canonical JCA algorithm name ("SHA1" is an alias).
            MessageDigest crypt = MessageDigest.getInstance("SHA-1");
            // NOTE(review): getBytes() uses the platform default charset, matching
            // the original behavior; confirm the server-side encoding before
            // switching to an explicit charset.
            byte[] digest = crypt.digest(input.getBytes());
            // The original also built an unused hex string here; that dead code
            // has been removed — the returned value is Base64 of the raw digest.
            return Base64.getEncoder().encodeToString(digest);
        } catch (NoSuchAlgorithmException ex) {
            Logger.getLogger(HashGenerator.class.getName()).log(Level.SEVERE, null, ex);
            // The original fell through to encodeToString(null) and threw an
            // uninformative NullPointerException; fail explicitly instead.
            throw new IllegalStateException("SHA-1 MessageDigest is unavailable", ex);
        }
    }
}
package io.flutter.project;

import com.intellij.icons.AllIcons;
import com.intellij.ide.IconProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
import com.jetbrains.lang.dart.DartFileType;
import com.jetbrains.lang.dart.psi.DartFile;
import icons.FlutterIcons;
import io.flutter.FlutterUtils;
import io.flutter.pub.PubRoot;
import io.flutter.utils.FlutterModuleUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.Objects;

import static com.intellij.psi.impl.ElementBase.overlayIcons;

/**
 * Supplies Flutter-specific icons for directories and Dart files inside
 * projects that use Flutter; returns null otherwise so other providers apply.
 */
public class FlutterIconProvider extends IconProvider {
  private static final Icon TEST_FILE = overlayIcons(DartFileType.INSTANCE.getIcon(), AllIcons.Nodes.JunitTestMark);

  @Nullable
  public Icon getIcon(@NotNull final PsiElement element, @Iconable.IconFlags final int flags) {
    final Project project = element.getProject();
    if (!FlutterModuleUtils.usesFlutter(project)) return null;

    // Directories.
    if (element instanceof PsiDirectory) {
      final VirtualFile file = ((PsiDirectory)element).getVirtualFile();
      if (!file.isInLocalFileSystem()) return null;

      // Show an icon for flutter modules.
      final PubRoot pubRoot = PubRoot.forDirectory(file);
      if (pubRoot != null && pubRoot.declaresFlutter()) {
        return FlutterIcons.Flutter;
      }

      // Otherwise decorate well-known directories inside a pub root.
      final PubRoot root = PubRoot.forDirectory(file.getParent());
      if (root == null) return null;

      // TODO(devoncarew): should we just make the folder a source kind?
      if (file.equals(root.getLib())) return AllIcons.Modules.SourceRoot;

      if (Objects.equals(file, root.getAndroidDir())) return AllIcons.Nodes.KeymapTools;
      if (Objects.equals(file, root.getiOsDir())) return AllIcons.Nodes.KeymapTools;

      if (file.isDirectory() && file.getName().equals(".idea")) return AllIcons.Modules.GeneratedFolder;
    }

    // Files.
    if (element instanceof DartFile) {
      final DartFile dartFile = (DartFile)element;
      final VirtualFile file = dartFile.getVirtualFile();
      // FIX: getVirtualFile() can return null (e.g. for in-memory PSI);
      // the original dereferenced it unconditionally and could NPE.
      if (file == null || !file.isInLocalFileSystem()) return null;

      // Use a simple naming convention heuristic to identify test files.
      // TODO(pq): consider pushing up to the Dart Plugin.
      if (FlutterUtils.isInTestDir(dartFile) && file.getName().endsWith("_test.dart")) {
        return TEST_FILE;
      }
    }

    return null;
  }
}
package io.compgen.ngsutils.vcf;

import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import io.compgen.common.ListBuilder;
import io.compgen.common.StringUtils;

/**
 * In-memory representation of a VCF header: the file-format line, INFO/FORMAT/
 * FILTER definitions, sample names, and all other "##" meta lines verbatim.
 */
public class VCFHeader {
    protected String format;
    // LinkedHashMaps preserve the original definition order when rewriting.
    protected Map<String,VCFAnnotationDef> infoDefs = new LinkedHashMap<String, VCFAnnotationDef>();
    protected Map<String,VCFAnnotationDef> formatDefs = new LinkedHashMap<String, VCFAnnotationDef>();
    protected Map<String,VCFFilterDef> filterDefs = new LinkedHashMap<String, VCFFilterDef>();

    // All other "##" meta lines (contigs, samples, commands, ...) kept verbatim.
    protected List<String> lines = new ArrayList<String>();

    protected String headerLine;   // the "#CHROM  POS ..." column header line
    protected String[] samples = null;  // null when the file carries no sample columns

    /**
     * @param format     the ##fileformat line
     * @param input      all remaining "##" meta lines
     * @param headerLine the #CHROM column header line (sample names from col 10 on)
     */
    public VCFHeader(String format, List<String> input, String headerLine) throws VCFParseException {
        if (format == null) {
            throw new VCFParseException("Missing format in header?");
        }
        this.format = format;
        this.headerLine = headerLine;

        for (String line: input) {
            if (line.startsWith("##INFO=")) {
                addInfo(VCFAnnotationDef.parseString(line));
            } else if (line.startsWith("##FORMAT=")) {
                addFormat(VCFAnnotationDef.parseString(line));
            } else if (line.startsWith("##FILTER=")) {
                addFilter(VCFFilterDef.parse(line));
            } else {
                lines.add(line);
            }
        }

        // Columns 1-9 are fixed (CHROM..FORMAT); anything beyond is a sample name.
        String[] spl = headerLine.split("\t");
        if (spl.length > 9) {
            samples = new String[spl.length-9];
            for (int i=9; i< spl.length; i++) {
                samples[i-9]=spl[i];
            }
        }
    }

    public List<String> getSamples() {
        // NOTE(review): samples may be null when the VCF has no sample columns;
        // presumably ListBuilder.build handles that — confirm upstream.
        return ListBuilder.build(samples);
    }

    public void addLine(String line) {
        this.lines.add(line);
    }

    public void write(OutputStream out) throws IOException {
        write(out, true, false);
    }

    /**
     * Writes the header.
     *
     * @param out        destination stream (not closed here)
     * @param includeAll also write the ##fileformat and #CHROM lines
     * @param strip      drop INFO/FORMAT/FILTER/##SAMPLE lines and sample columns
     */
    public void write(OutputStream out, boolean includeAll, boolean strip) throws IOException {
        if (includeAll) {
            // NOTE(review): this condition was reconstructed from corrupted source;
            // VCF meta lines must begin with "##" — verify against upstream.
            while (!format.startsWith("##")) {
                format = "#" + format;
            }
            StringUtils.writeOutputStream(out, format + "\n");
        }

        List<String> outlines = new ArrayList<String>();  // was ";;" (stray semicolon)

        if (!strip) {
            for (VCFAnnotationDef def: infoDefs.values()) {
                outlines.add(def.toString());
            }
            for (VCFFilterDef def: filterDefs.values()) {
                outlines.add(def.toString());
            }
            for (VCFAnnotationDef def: formatDefs.values()) {
                outlines.add(def.toString());
            }
            outlines.addAll(lines);
        } else {
            for (String line: lines) {
                // when stripping the output, still output all other lines except the sample lines
                if (!line.startsWith("##SAMPLE=")) {
                    outlines.add(line);
                }
            }
        }

        for (String line: outlines) {
            StringUtils.writeOutputStream(out, line + "\n");
        }

        if (includeAll) {
            if (strip) {
                // No sample columns when stripped — write the fixed 8-column header.
                StringUtils.writeOutputStream(out, "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n");
            } else {
                // NOTE(review): reconstructed from corrupted source; the column
                // header line must begin with "#".
                if (!headerLine.startsWith("#")) {
                    headerLine = "#" + headerLine;
                }
                StringUtils.writeOutputStream(out, headerLine + "\n");
            }
        }
    }

    /** True if an identical meta line is already present. */
    public boolean contains(String s) {
        for (String line: lines) {
            if (line.equals(s)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Resolves a sample to its zero-based column position. Accepts either a
     * 1-based numeric index ("1", "2", ...) or a sample name.
     *
     * @return zero-based sample index, or -1 when not found
     */
    public int getSamplePosByName(String name) {
        // You can ID a sample by index (1, 2, 3)
        try {
            int i = Integer.parseInt(name);
            return i - 1;
        } catch (NumberFormatException e) {
            // ignore this... fall through to a name lookup
        }

        // FIX: guard against a header without sample columns (samples == null),
        // which previously caused a NullPointerException here.
        if (samples == null) {
            return -1;
        }

        // OR by name NORMAL, TUMOR, etc...
        for (int i=0; i<samples.length; i++) {
            if (samples[i].equals(name)) {
                return i;
            }
        }
        return -1;
    }

    public void addInfo(VCFAnnotationDef def) {
        infoDefs.put(def.id, def);
    }

    public void addFormat(VCFAnnotationDef def) {
        formatDefs.put(def.id, def);
    }

    public void addFilter(VCFFilterDef def) {
        filterDefs.put(def.id, def);
    }

    public VCFAnnotationDef getFormatDef(String id) {
        return formatDefs.get(id);
    }

    public Set<String> getFormatIDs() {
        return formatDefs.keySet();
    }

    public VCFAnnotationDef getInfoDef(String id) {
        return infoDefs.get(id);
    }

    public Set<String> getInfoIDs() {
        return infoDefs.keySet();
    }

    /** Extracts contig names from ##contig=&lt;ID=name,length=num,...&gt; lines. */
    public List<String> getContigNames() {
        List<String> names = new ArrayList<String>();
        for (String line: lines) {
            if (line.startsWith("##contig=<") && line.endsWith(">")) {
                // contig lines are formatted:
                // ##contig=<ID=name,length=num,...>
                for (String s: line.substring(10, line.length()-1).split(",")) {
                    String[] spl = s.split("=");
                    if (spl[0].toUpperCase().equals("ID")) {
                        names.add(spl[1]);
                    }
                }
            }
        }
        return names;
    }

    /** @return the declared length of the named contig, or -1 when unknown */
    public int getContigLength(String name) {
        for (String line: lines) {
            if (line.startsWith("##contig=<") && line.endsWith(">")) {
                // contig lines are formatted:
                // ##contig=<ID=name,length=num,...>
                boolean found = false;
                int length = -1;
                for (String s: line.substring(10, line.length()-1).split(",")) {
                    String[] spl = s.split("=");
                    if (spl[0].toUpperCase().equals("ID")) {
                        if (spl[1].equals(name)) {
                            found = true;
                        }
                    }
                    if (spl[0].toUpperCase().equals("LENGTH")) {
                        length = Integer.parseInt(spl[1]);
                    }
                }
                if (found) {
                    return length;
                }
            }
        }
        return -1;
    }

    /** Escapes backslashes and double quotes for quoted VCF header values. */
    public static String quoteString(String s) {
        s=s.replaceAll("\\\\", "\\\\\\\\");
        s=s.replaceAll("\\\"", "\\\\\"");
        return s;
    }

    /**
     * Parses a key=value[,key=value...] string where values may be quoted and
     * quoted values may contain escaped characters.
     */
    public static Map<String, String> parseQuotedLine(String s) throws VCFParseException {
        Map<String, String> values = new HashMap<String, String>();

        String k = null;
        String acc = "";
        boolean inquote = false;

        for (int i=0; i<s.length(); i++) {
            if (k == null) {
                if (s.charAt(i) == '=') {
                    k = acc;
                    inquote = false;
                    acc = "";
                } else {
                    acc += s.charAt(i);
                }
            } else {
                if (!inquote && s.charAt(i) == '"') {
                    inquote = true;
                } else if (inquote && s.charAt(i) == '"') {
                    inquote = false;
                } else if (!inquote && s.charAt(i) == ',') {
                    // FIX: only split on commas outside quotes; the original
                    // split quoted values like Description="A, B" in two.
                    values.put(k, acc);
                    k = null;
                    acc = "";
                } else if (inquote && s.charAt(i) == '\\' && i < s.length()-1) {
                    // escape the next char...
                    acc += s.charAt(i+1);
                    i++;
                } else {
                    acc += s.charAt(i);
                }
            }
        }
        if (k != null) {
            values.put(k, acc);
        }
        return values;
    }
}
package net.sf.picard.util;

import java.util.*;

/**
 * Small utility methods for dealing with collection classes.
 * @author mccowan
 */
public class CollectionUtil {

    /** Returns a new mutable list containing the given elements, in order. */
    @SafeVarargs // elements are only read via Collections.addAll; no array store
    public static <T> List<T> makeList (final T... list) {
        final List<T> result = new ArrayList<T>(list.length); // presized
        Collections.addAll(result, list);
        return result;
    }

    /** Returns a new mutable set containing the given elements (duplicates collapse). */
    @SafeVarargs // elements are only read via Collections.addAll; no array store
    public static <T> Set<T> makeSet (final T... list) {
        final Set<T> result = new HashSet<T>();
        Collections.addAll(result, list);
        return result;
    }

    /** Construct a string by toString()ing each item in the collection with inBetween between each item. */
    public static String join(final Collection<?> items, final String inBetween) {
        final StringBuilder builder = new StringBuilder();
        for (final Object item : items) {
            if (builder.length() > 0) builder.append(inBetween);
            builder.append(item);
        }
        return builder.toString();
    }

    /** Simple multi-map for convenience of storing collections in map values. */
    public static class MultiMap<K, V> extends HashMap<K, Collection<V>> {
        /** Adds v to the collection stored at k, creating it if necessary. */
        public void append(final K k, final V v) {
            this.initializeKeyIfUninitialized(k);
            this.get(k).add(v);
        }

        /** Adds every element of v to the collection stored at k. */
        public void appendAll(final K k, final Collection<? extends V> v) {
            this.initializeKeyIfUninitialized(k);
            this.get(k).addAll(v);
        }

        // Values default to LinkedList; kept for backward compatibility.
        private void initializeKeyIfUninitialized(final K k) {
            if (!this.containsKey(k)) this.put(k, new LinkedList<V>());
        }
    }

    /**
     * A defaulting map, which returns a default value when a value that does not exist in the map is looked up.
     *
     * This map supports two modes: injecting-on-default, and not injecting-on-default.  When injecting on default, when a lookup is
     * performed and a default value is returned, the default value is injected at that key, so that it now lives in the underlying map.
     * Without this mode, the value is simply returned and the underlying map is unaffected.
     *
     * Note: When using injecting-on-default mode, and performing a lookup with a non-key type (the get method accepts any object), a
     * class cast exception will be thrown because a non-key type cannot be added to the map.
     * @param <K>
     * @param <V>
     */
    public static class DefaultingMap<K, V> extends HashMap<K, V> {
        final Factory<V> defaultGenerator;
        final boolean injectValueOnDefault;

        /** Creates a defaulting map which defaults to the provided value and with injecting-on-default disabled. */
        public DefaultingMap(final V defaultValue) {
            this(new Factory<V>() {
                @Override
                public V make() {
                    return defaultValue;
                }
            }, false);
        }

        /**
         * Creates a defaulting map that generates defaults from the provided factory.  This is useful when the default is non-static, or
         * the default is mutable, and the client wishes to get a value and mutate it and persist those changes in the map.
         */
        public DefaultingMap(final Factory<V> defaultGenerator, final boolean injectValueOnDefaulting) {
            this.defaultGenerator = defaultGenerator;
            this.injectValueOnDefault = injectValueOnDefaulting;
        }

        @Override
        @SuppressWarnings("unchecked") // Expect that the cast is successful; otherwise, client is breaking contract.
        public V get(final Object key) {
            if (!this.containsKey(key)) {
                final V val = this.defaultGenerator.make();
                if (this.injectValueOnDefault) {
                    this.put((K) key, val);
                }
                return val;
            } else {
                return super.get(key);
            }
        }

        /** Supplies default values for DefaultingMap. */
        public interface Factory<V> {
            V make();
        }
    }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users
// to do with audio, video, and animation what Wiki platforms allow them to do with
// text.
// This program is free software: you can redistribute it and/or modify
// published by the Free Software Foundation, either version 3 of the
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// @ignore
package com.kaltura.client;

import com.kaltura.client.utils.request.ConnectionConfiguration;
import com.kaltura.client.types.BaseResponseProfile;

/**
 * This class was generated using generate.php
 * against an XML schema provided by Kaltura.
 *
 * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN.
 */
@SuppressWarnings("serial")
public class Client extends ClientBase {

    public Client(ConnectionConfiguration config) {
        super(config);
        this.setClientTag("java:20-05-29");
        this.setApiVersion("16.3.0");
        this.clientConfiguration.put("format", 1); // JSON
    }

    /**
     * @param clientTag
     */
    public void setClientTag(String clientTag){
        this.clientConfiguration.put("clientTag", clientTag);
    }

    /**
     * @return String
     */
    public String getClientTag(){
        if(this.clientConfiguration.containsKey("clientTag")){
            return(String) this.clientConfiguration.get("clientTag");
        }
        return null;
    }

    /**
     * @param apiVersion
     */
    public void setApiVersion(String apiVersion){
        this.clientConfiguration.put("apiVersion", apiVersion);
    }

    /**
     * @return String
     */
    public String getApiVersion(){
        if(this.clientConfiguration.containsKey("apiVersion")){
            return(String) this.clientConfiguration.get("apiVersion");
        }
        return null;
    }

    /**
     * @param partnerId Impersonated partner id
     */
    public void setPartnerId(Integer partnerId){
        this.requestConfiguration.put("partnerId", partnerId);
    }

    /**
     * Impersonated partner id
     *
     * @return Integer
     */
    public Integer getPartnerId(){
        if(this.requestConfiguration.containsKey("partnerId")){
            return(Integer) this.requestConfiguration.get("partnerId");
        }
        // NOTE: unlike the other getters, this one defaults to 0, not null.
        return 0;
    }

    /**
     * @param ks Kaltura API session
     */
    public void setKs(String ks){
        this.requestConfiguration.put("ks", ks);
    }

    /**
     * Kaltura API session
     *
     * @return String
     */
    public String getKs(){
        if(this.requestConfiguration.containsKey("ks")){
            return(String) this.requestConfiguration.get("ks");
        }
        return null;
    }

    /**
     * @param sessionId Kaltura API session
     */
    public void setSessionId(String sessionId){
        // Alias of setKs: both store under the "ks" request-configuration key.
        this.requestConfiguration.put("ks", sessionId);
    }

    /**
     * Kaltura API session
     *
     * @return String
     */
    public String getSessionId(){
        if(this.requestConfiguration.containsKey("ks")){
            return(String) this.requestConfiguration.get("ks");
        }
        return null;
    }

    /**
     * @param responseProfile Response profile - this attribute will be automatically unset after every API call.
     */
    public void setResponseProfile(BaseResponseProfile responseProfile){
        this.requestConfiguration.put("responseProfile", responseProfile);
    }

    /**
     * Response profile - this attribute will be automatically unset after every API call.
     *
     * @return BaseResponseProfile
     */
    public BaseResponseProfile getResponseProfile(){
        if(this.requestConfiguration.containsKey("responseProfile")){
            return(BaseResponseProfile) this.requestConfiguration.get("responseProfile");
        }
        return null;
    }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users
// to do with audio, video, and animation what Wiki platforms allow them to do with
// text.
// This program is free software: you can redistribute it and/or modify
// published by the Free Software Foundation, either version 3 of the
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// @ignore
package com.kaltura.client;

import com.kaltura.client.utils.request.ConnectionConfiguration;
import com.kaltura.client.types.BaseResponseProfile;

/**
 * This class was generated using generate.php
 * against an XML schema provided by Kaltura.
 *
 * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN.
 */
@SuppressWarnings("serial")
public class Client extends ClientBase {

    public Client(ConnectionConfiguration config) {
        super(config);
        this.setClientTag("java:22-03-27");
        this.setApiVersion("18.1.0");
        this.clientConfiguration.put("format", 1); // JSON
    }

    /**
     * @param clientTag
     */
    public void setClientTag(String clientTag){
        this.clientConfiguration.put("clientTag", clientTag);
    }

    /**
     * @return String
     */
    public String getClientTag(){
        if(this.clientConfiguration.containsKey("clientTag")){
            return(String) this.clientConfiguration.get("clientTag");
        }
        return null;
    }

    /**
     * @param apiVersion
     */
    public void setApiVersion(String apiVersion){
        this.clientConfiguration.put("apiVersion", apiVersion);
    }

    /**
     * @return String
     */
    public String getApiVersion(){
        if(this.clientConfiguration.containsKey("apiVersion")){
            return(String) this.clientConfiguration.get("apiVersion");
        }
        return null;
    }

    /**
     * @param partnerId Impersonated partner id
     */
    public void setPartnerId(Integer partnerId){
        this.requestConfiguration.put("partnerId", partnerId);
    }

    /**
     * Impersonated partner id
     *
     * @return Integer
     */
    public Integer getPartnerId(){
        if(this.requestConfiguration.containsKey("partnerId")){
            return(Integer) this.requestConfiguration.get("partnerId");
        }
        // NOTE: unlike the other getters, this one defaults to 0, not null.
        return 0;
    }

    /**
     * @param ks Kaltura API session
     */
    public void setKs(String ks){
        this.requestConfiguration.put("ks", ks);
    }

    /**
     * Kaltura API session
     *
     * @return String
     */
    public String getKs(){
        if(this.requestConfiguration.containsKey("ks")){
            return(String) this.requestConfiguration.get("ks");
        }
        return null;
    }

    /**
     * @param sessionId Kaltura API session
     */
    public void setSessionId(String sessionId){
        // Alias of setKs: both store under the "ks" request-configuration key.
        this.requestConfiguration.put("ks", sessionId);
    }

    /**
     * Kaltura API session
     *
     * @return String
     */
    public String getSessionId(){
        if(this.requestConfiguration.containsKey("ks")){
            return(String) this.requestConfiguration.get("ks");
        }
        return null;
    }

    /**
     * @param responseProfile Response profile - this attribute will be automatically unset after every API call.
     */
    public void setResponseProfile(BaseResponseProfile responseProfile){
        this.requestConfiguration.put("responseProfile", responseProfile);
    }

    /**
     * Response profile - this attribute will be automatically unset after every API call.
     *
     * @return BaseResponseProfile
     */
    public BaseResponseProfile getResponseProfile(){
        if(this.requestConfiguration.containsKey("responseProfile")){
            return(BaseResponseProfile) this.requestConfiguration.get("responseProfile");
        }
        return null;
    }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:22-03-23"); this.setApiVersion("18.1.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:21-04-06"); this.setApiVersion("16.19.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
package org.relique.jdbc.csv;

import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.relique.io.DataReader;
import org.relique.io.ListDataReader;

/**
 * This class implements the java.sql.ResultSet JDBC interface for the
 * CsvJdbc driver.
 *
 * @author Jonathan Ackerman
 * @author Michael Maraya
 * @author Tomasz Skutnik
 * @author Chetan Gupta
 * @version $Id: CsvResultSet.java,v 1.56 2011/11/01 13:23:00 simoc Exp $
 */
public class CsvResultSet implements ResultSet
{
	/** Metadata for this ResultSet */
	private ResultSetMetaData resultSetMetaData;

	/** Statement that produced this ResultSet */
	private CsvStatement statement;

	// Scroll type of this ResultSet; TYPE_SCROLL_SENSITIVE forces rows to be buffered
	private int isScrollable = ResultSet.TYPE_SCROLL_SENSITIVE;

	/** Helper class that performs the actual file reads */
	private DataReader reader;

	/** Table referenced by the Statement */
	private String tableName;

	/** Last column name index read */
	private int lastIndexRead = -1;

	// Parsed SQL WHERE expression, or null when the query has no WHERE clause
	private Expression whereClause;

	// GROUP BY expressions, or null when the query has no GROUP BY clause
	private List<Expression> groupByColumns;

	// Columns used for SELECT DISTINCT de-duplication (also set when a GROUP BY
	// without aggregates is simplified to a DISTINCT)
	private List<Expression> distinctColumns;

	// Parsed SQL HAVING expression, or null when the query has no HAVING clause
	private Expression havingClause;

	// ORDER BY entries; each element is {direction (Integer), expression}
	private List<Object []> orderByColumns;

	// Selected columns; each element is {alias/name (String), expression}
	private List<Object []> queryEnvironment;

	// Aggregate functions (COUNT, SUM, ...) found in the select list
	private List<AggregateFunction> aggregateFunctions;

	// Previously-seen value tuples for DISTINCT, or null when not distinct
	private Set<ArrayList<Object>> distinctValues;

	// Raw column name -> value mapping for the current row
	private Map<String, Object> recordEnvironment;

	// Columns referenced by WHERE/GROUP BY/HAVING/ORDER BY clauses
	private List<String> usedColumns;

	// Formats/zone used to parse time, date and timezone values from the CSV file
	private String timeFormat;

	private String dateFormat;

	private String timeZone;

	// Converts CSV string values to typed Java objects
	private StringConverter converter;

	// Buffer of all rows, used for GROUP BY/ORDER BY/aggregates/scrollable results
	private ArrayList<Map<String, Object>> bufferedRecordEnvironments = null;

	// 1-based cursor position; 0 means before the first row
	private int currentRow;

	// True once the last row of the input has been read
	private boolean hitTail = false;

	// Row limit from Statement.setMaxRows(); 0 means unlimited
	private int maxRows;

	private int fetchSize;

	// Row limit from the SQL LIMIT clause; negative means no limit
	private int limit;

	private boolean isClosed = false;

	/**
	 * Compares SQL ORDER BY expressions for two records.
	 */
	public class OrderByComparator implements Comparator<Map<String, Object>>
	{
		/**
		 * Compares two rows on each ORDER BY expression in turn until one differs.
		 * NOTE(review): evaluates expressions by temporarily setting the outer
		 * recordEnvironment field, so sorting is not safe for concurrent use.
		 * Null values sort before non-null values.
		 */
		public int compare(Map<String, Object> recordEnvironment1, Map<String, Object> recordEnvironment2)
		{
			int retval = 0;
			int i = 0;
			while (i < orderByColumns.size() && retval == 0)
			{
				Object []o = orderByColumns.get(i);
				Integer direction = (Integer)o[0];
				Expression expr = (Expression)o[1];
				recordEnvironment = recordEnvironment1;
				Map<String, Object> objectEnvironment1 = updateRecordEnvironment(true);
				if (converter != null)
					objectEnvironment1.put("@STRINGCONVERTER", converter);
				Comparable<Object> result1 = (Comparable<Object>)expr.eval(objectEnvironment1);
				recordEnvironment = recordEnvironment2;
				Map<String, Object> objectEnvironment2 = updateRecordEnvironment(true);
				if (converter != null)
					objectEnvironment2.put("@STRINGCONVERTER", converter);
				Comparable<Object> result2 = (Comparable<Object>)expr.eval(objectEnvironment2);
				if (result1 == null)
				{
					if (result2 == null)
						retval = 0;
					else
						retval = -1;
				}
				else if (result2 == null)
				{
					retval = 1;
				}
				else
				{
					retval = result1.compareTo(result2);
				}
				// negative direction means DESC: invert the comparison
				if (direction.intValue() < 0)
					retval = -retval;
				i++;
			}
			return retval;
		}
	}

	/**
	 * Constructor for the CsvResultSet object
	 *
	 * @param statement Statement that produced this ResultSet
	 * @param reader Helper class that performs the actual file reads
	 * @param tableName Table referenced by the Statement
	 * @param typeNames Array of available columns for referenced table
	 * @param whereClause expression for the SQL where clause.
	 * @param groupByColumns expressions for SQL GROUP BY clause.
* @param orderByColumns expressions for SQL ORDER BY clause. * @param sqlLimit maximum number of rows set with SQL LIMIT clause. * @param sqlOffset number of rows to skip with SQL OFFSET clause. * @param columnTypes A comma-separated string specifying the type of the i-th column of the database table (not of the result). * @param whereColumnName the name of the column, needed late by a select * * @throws ClassNotFoundException in case the typed columns fail * @throws SQLException */ protected CsvResultSet(CsvStatement statement, DataReader reader, String tableName, List<Object []> queryEnvironment, boolean isDistinct, int isScrollable, Expression whereClause, List<Expression> groupByColumns, Expression havingClause, List<Object []> orderByColumns, int sqlLimit, int sqlOffset, String columnTypes, int skipLeadingLines) throws ClassNotFoundException, SQLException { this.statement = statement; maxRows = statement.getMaxRows(); fetchSize = statement.getFetchSize(); this.limit = sqlLimit; this.isScrollable = isScrollable; this.reader = reader; this.tableName = tableName; this.queryEnvironment = new ArrayList<Object []>(queryEnvironment); this.aggregateFunctions = new ArrayList<AggregateFunction>(); this.whereClause = whereClause; if (groupByColumns != null) this.groupByColumns = new ArrayList<Expression>(groupByColumns); else this.groupByColumns = null; this.havingClause = havingClause; if (orderByColumns != null) this.orderByColumns = new ArrayList<Object []>(orderByColumns); else this.orderByColumns = null; if (isDistinct) this.distinctValues = new HashSet<ArrayList<Object>>(); if(reader instanceof CsvReader || reader instanceof ListDataReader) { // timestampFormat = ((CsvConnection)statement.getConnection()).getTimestampFormat(); timeFormat = ((CsvConnection)statement.getConnection()).getTimeFormat(); dateFormat = ((CsvConnection)statement.getConnection()).getDateFormat(); timeZone = ((CsvConnection)statement.getConnection()).getTimeZoneName(); this.converter = new 
StringConverter(dateFormat, timeFormat, timeZone); if (reader instanceof CsvReader) { ((CsvReader) reader).setConverter(converter); if(!"".equals(columnTypes)) ((CsvReader) reader).setColumnTypes(columnTypes); } } if (whereClause!= null) this.usedColumns = new LinkedList<String>(whereClause.usedColumns()); else this.usedColumns = new LinkedList<String>(); String[] columnNames = reader.getColumnNames(); String tableAlias = reader.getTableAlias(); HashSet<String> allReaderColumns = new HashSet<String>(); for (int i = 0; i < columnNames.length; i++) { String columnName = columnNames[i].toUpperCase(); allReaderColumns.add(columnName); if (tableAlias != null) allReaderColumns.add(tableAlias + "." + columnName); } /* * Replace any "select *" with the list of column names in that table. */ for (int i = 0; i < this.queryEnvironment.size(); i++) { Object[] o = this.queryEnvironment.get(i); if (o[1] instanceof AsteriskExpression) { AsteriskExpression asteriskExpression = (AsteriskExpression)o[1]; /* * Check that any table alias is valid. */ String asterisk = asteriskExpression.toString(); if (!(asterisk.equals("*") || (tableAlias != null && asterisk.equalsIgnoreCase(tableAlias + ".*")))) throw new SQLException("Invalid column name: " + asterisk); this.queryEnvironment.remove(i); for (int j = 0; j < columnNames.length; j++) { this.queryEnvironment.add(i + j, new Object[]{columnNames[j], new ColumnName(columnNames[j])}); } } } /* * Replace any "group by 2" with the 2nd column in the query list. */ if (this.groupByColumns != null) { for (int i = 0; i < this.groupByColumns.size(); i++) { Expression expression = this.groupByColumns.get(i); if (expression instanceof NumericConstant) { NumericConstant n = (NumericConstant)expression; if (!(n.value instanceof Integer)) throw new SQLException("Invalid GROUP BY column: " + n); int index = n.value.intValue(); /* * Column numbering in SQL starts at 1, not 0. 
*/ index if (index < 0 || index >= this.queryEnvironment.size()) { throw new SQLException("Invalid GROUP BY column: " + (index + 1)); } Object[] q = this.queryEnvironment.get(index); this.groupByColumns.set(i, (Expression)q[1]); } } } if (this.groupByColumns != null) { for (Expression expr : this.groupByColumns) { this.usedColumns.addAll(expr.usedColumns()); } if (havingClause!= null) this.usedColumns.addAll(havingClause.usedColumns()); } /* * Replace any "order by 2" with the 2nd column in the query list. */ if (this.orderByColumns != null) { for (Object []o : this.orderByColumns) { Expression expression = (Expression)o[1]; if (expression instanceof NumericConstant) { NumericConstant n = (NumericConstant)expression; if (!(n.value instanceof Integer)) throw new SQLException("Invalid ORDER BY column: " + n); int index = n.value.intValue(); /* * Column numbering in SQL starts at 1, not 0. */ index if (index < 0 || index >= this.queryEnvironment.size()) { throw new SQLException("Invalid ORDER BY column: " + (index + 1)); } Object[] q = this.queryEnvironment.get(index); o[1] = q[1]; } } } if (this.orderByColumns != null) { for (Object []o : this.orderByColumns) { Expression expr = (Expression)o[1]; this.usedColumns.addAll(expr.usedColumns()); } } /* * Find any SQL aggregate functions so they can be evaluated separately. */ for (int i = 0; i < this.queryEnvironment.size(); i++) { Object[] o = this.queryEnvironment.get(i); Expression expr = (Expression)o[1]; List<AggregateFunction> exprAggregateFunctions = expr.aggregateFunctions(); this.aggregateFunctions.addAll(exprAggregateFunctions); for (AggregateFunction aggregateFunction : exprAggregateFunctions) { this.usedColumns.addAll(aggregateFunction.aggregateColumns()); } } if (aggregateFunctions.size() > 0 && this.groupByColumns == null) { /* * Check there is no mix of query columns and aggregate functions. 
*/ List<String> allUsedColumns = new LinkedList<String>(); for (int i = 0; i < this.queryEnvironment.size(); i++) { Object[] o = this.queryEnvironment.get(i); if (o[1] != null) { allUsedColumns.addAll(((Expression)o[1]).usedColumns()); } } if (allUsedColumns.size() > 0 && aggregateFunctions.size() > 0) throw new SQLException("Query columns cannot be combined with aggregate functions"); } if (whereClause != null && whereClause.aggregateFunctions().size() > 0) throw new SQLException("Aggregate functions not allowed in WHERE clause"); if (!((CsvConnection)statement.getConnection()).isIndexedFiles()) { //TODO no check when indexedFiles=true because unit test TestCsvDriver.testFromNonExistingIndexedTable then fails. /* * Check that each selected expression is valid, using only column names contained in the table. */ for (int i = 0; i < this.queryEnvironment.size(); i++) { Object[] o = this.queryEnvironment.get(i); if (o[1] != null) { Expression expr = (Expression)o[1]; List<String> exprUsedColumns = expr.usedColumns(); for (Object usedColumn : exprUsedColumns) { if (!allReaderColumns.contains(usedColumn)) throw new SQLException("Invalid column name: " + usedColumn); } } //TODO selected column aliases are allowed in WHERE clause (although this is invalid SQL) and unit tested in TestCsvDriver.testFieldAsAlias so add all aliases to list too. allReaderColumns.add(o[0].toString()); } } /* * Check that all columns used in the WHERE, GROUP BY, HAVING * and ORDER BY clauses do exist in the table. 
*/ if (!((CsvConnection)statement.getConnection()).isIndexedFiles()) { for (Object usedColumn : this.usedColumns) { if (!allReaderColumns.contains(usedColumn)) throw new SQLException("Invalid column name: " + usedColumn); } checkGroupBy(); if (this.orderByColumns != null) { for (Object []o : this.orderByColumns) { Expression expr = (Expression)o[1]; List<String> exprUsedColumns = new LinkedList<String>(expr.usedColumns()); for (AggregateFunction aggregatFunction : expr.aggregateFunctions()) { exprUsedColumns.addAll(aggregatFunction.aggregateColumns()); } if (exprUsedColumns.isEmpty()) { /* * Must order by something that contains at least one column, not 'foo' or 1+1. */ throw new SQLException("Invalid ORDER BY column: " + expr.toString()); } } } } if (this.groupByColumns != null || this.orderByColumns != null || this.aggregateFunctions.size() > 0 || this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { bufferedRecordEnvironments = new ArrayList<Map<String, Object>>(); currentRow = 0; } if (this.groupByColumns != null) { /* * Read all rows and group them together based on GROUP BY expressions. 
*/ int savedMaxRows = maxRows; int savedLimit = limit; maxRows = 0; limit = -1; ArrayList<ArrayList<Object>> groupOrder = new ArrayList<ArrayList<Object>>(); HashMap<ArrayList<Object>, ArrayList<Map<String, Object>>> groups = new HashMap<ArrayList<Object>, ArrayList<Map<String, Object>>>(); try { while (next()) { Map<String, Object> objectEnvironment = updateRecordEnvironment(true); if (converter != null) objectEnvironment.put("@STRINGCONVERTER", converter); ArrayList<Object> groupByKeys = new ArrayList<Object>(this.groupByColumns.size()); for (Expression expr : this.groupByColumns) { groupByKeys.add(expr.eval(objectEnvironment)); } ArrayList<Map<String, Object>> groupByValues = groups.get(groupByKeys); if (groupByValues == null) { groupByValues = new ArrayList<Map<String, Object>>(); groups.put(groupByKeys, groupByValues); groupOrder.add(groupByKeys); } groupByValues.add(recordEnvironment); } bufferedRecordEnvironments.clear(); for (ArrayList<Object> groupByKey : groupOrder) { ArrayList<Map<String, Object>> values = groups.get(groupByKey); /* * Create a row in the ResultSet for each group with a * reference to all the rows in that group so we can * later calculate any aggregate functions for each group. */ Map<String, Object> firstRow = new HashMap<String, Object>(values.get(0)); firstRow.put("@GROUPROWS", values); if (this.havingClause == null || this.havingClause.isTrue(firstRow)) bufferedRecordEnvironments.add(firstRow); } if (this.orderByColumns != null) { sortRows(sqlOffset); } } finally { maxRows = savedMaxRows; limit = savedLimit; } /* * Rewind back to before the row so we can read it. */ currentRow = 0; recordEnvironment = null; updateRecordEnvironment(false); hitTail = true; } else if (this.aggregateFunctions.size() > 0) { /* * Read all rows, evaluating the aggregate functions for each row to * produce a single row result. 
*/ int savedMaxRows = maxRows; int savedLimit = limit; maxRows = 0; limit = -1; try { while (next()) { for (Object o : this.aggregateFunctions) { AggregateFunction func = (AggregateFunction)o; func.processRow(recordEnvironment); } } /* * Create a single row ResultSet from the aggregate functions. */ bufferedRecordEnvironments.clear(); if ((savedLimit < 0 || savedLimit > 0) && sqlOffset == 0) bufferedRecordEnvironments.add(new HashMap<String, Object>()); } finally { maxRows = savedMaxRows; limit = savedLimit; } /* * Rewind back to before the row so we can read it. */ currentRow = 0; recordEnvironment = null; updateRecordEnvironment(false); hitTail = true; } else if (this.orderByColumns != null) { /* * Read all rows into memory and sort them based on SQL ORDER BY expressions. */ int savedMaxRows = maxRows; int savedLimit = limit; maxRows = 0; limit = -1; try { while (next()) ; } finally { maxRows = savedMaxRows; limit = savedLimit; } sortRows(sqlOffset); /* * Rewind back to before first row so we can now read them in sorted order. */ currentRow = 0; recordEnvironment = null; updateRecordEnvironment(false); } else if (sqlOffset > 0) { int savedMaxRows = maxRows; int savedLimit = limit; maxRows = 0; limit = -1; /* * Skip the first n rows. */ try { while (sqlOffset > 0) { if (!next()) break; sqlOffset } } finally { maxRows = savedMaxRows; limit = savedLimit; currentRow = 0; if (bufferedRecordEnvironments != null) bufferedRecordEnvironments.clear(); } } } /** * Check that all selected and ORDER BY columns also appear in any GROUP BY clause. * @throws SQLException */ private void checkGroupBy() throws SQLException { if (this.groupByColumns != null) { for (Expression expr : this.groupByColumns) { List<String> exprUsedColumns = expr.usedColumns(); if (exprUsedColumns.isEmpty()) { /* * Must group by something that contains at least one column, not 'foo' or 1+1. 
*/
					throw new SQLException("Invalid GROUP BY column: " + expr.toString());
				}
			}
			ArrayList<String> groupingColumns = new ArrayList<String>();
			for (Expression expr : this.groupByColumns)
			{
				groupingColumns.addAll(expr.usedColumns());
			}
			// All column names that appear in the select list (aliases and used columns)
			ArrayList<String> queryEnvironmentColumns = new ArrayList<String>();
			for (int i = 0; i < this.queryEnvironment.size(); i++)
			{
				Object[] o = this.queryEnvironment.get(i);
				queryEnvironmentColumns.add(o[0].toString());
				if (o[1] != null)
				{
					Expression expr = (Expression)o[1];
					for (Object o2 : expr.usedColumns())
					{
						queryEnvironmentColumns.add(o2.toString());
					}
				}
			}
			for (int i = 0; i < this.queryEnvironment.size(); i++)
			{
				Object[] o = this.queryEnvironment.get(i);
				if (!groupingColumns.contains(o[0]))
				{
					if (o[1] != null)
					{
						Expression expr = (Expression)o[1];
						for (Object o2 : expr.usedColumns())
						{
							String columnName = o2.toString();
							if (!groupingColumns.contains(columnName))
							{
								// also accept the alias-qualified form "ALIAS.COLUMN"
								String tableAlias = this.reader.getTableAlias();
								if (tableAlias == null || (!groupingColumns.contains(tableAlias + "." + columnName)))
								{
									/*
									 * GROUP BY must include all queried columns.
									 */
									throw new SQLException("Column not included in GROUP BY: " + columnName);
								}
							}
						}
					}
				}
			}
			if (this.havingClause != null)
			{
				for (String columnName : this.havingClause.usedColumns())
				{
					if (!queryEnvironmentColumns.contains(columnName))
					{
						throw new SQLException("Invalid HAVING column: " + columnName);
					}
				}
			}
			if (this.orderByColumns != null)
			{
				for (Object []o : this.orderByColumns)
				{
					Expression expr = (Expression)o[1];
					for (Object o2 : expr.usedColumns())
					{
						if (!queryEnvironmentColumns.contains(o2.toString()))
							throw new SQLException("ORDER BY column not included in GROUP BY: " + o2);
					}
				}
			}

			/*
			 * A query containing GROUP BY without any aggregate functions can be simplified
			 * to a SELECT DISTINCT, avoiding the need to load all records into memory.
			 */
			boolean hasAggregateFunctions = false;
			for (int i = 0; i < this.queryEnvironment.size(); i++)
			{
				Object[] o = this.queryEnvironment.get(i);
				Expression expr = (Expression)o[1];
				if (expr.aggregateFunctions().size() > 0)
					hasAggregateFunctions = true;
			}
			if (this.havingClause != null && this.havingClause.aggregateFunctions().size() > 0)
				hasAggregateFunctions = true;
			if (!hasAggregateFunctions)
			{
				this.distinctValues = new HashSet<ArrayList<Object>>();
				this.distinctColumns = new ArrayList<Expression>(this.groupByColumns);
				this.groupByColumns = null;
			}
		}
	}

	/**
	 * Sorts the buffered rows using the ORDER BY comparator, then trims the
	 * buffer to honour Statement.setMaxRows(), SQL LIMIT and SQL OFFSET.
	 */
	private void sortRows(int sqlOffset)
	{
		Map<String, Object> []allRows = new Map[bufferedRecordEnvironments.size()];
		for (int i = 0; i < allRows.length; i++)
			allRows[i] = bufferedRecordEnvironments.get(i);
		bufferedRecordEnvironments.clear();
		Arrays.sort(allRows, new OrderByComparator());
		int rowLimit = allRows.length;
		if (maxRows != 0 && maxRows < rowLimit)
			rowLimit = maxRows;
		if (limit >= 0 && sqlOffset + limit < rowLimit)
			rowLimit = sqlOffset + limit;
		for (int i = sqlOffset; i < rowLimit; i++)
			bufferedRecordEnvironments.add(allRows[i]);
	}

	/**
	 * Throws if this ResultSet has already been closed.
	 */
	private void checkOpen() throws SQLException
	{
		if (isClosed)
			throw new SQLException("ResultSet is already closed");
	}

	@Override
	public boolean next() throws SQLException
	{
		checkOpen();

		// Serve buffered rows first (GROUP BY / aggregates / ORDER BY / scrollable)
		if ((this.groupByColumns != null ||
			this.aggregateFunctions.size() > 0 ||
			this.orderByColumns != null ||
			this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) &&
			currentRow < bufferedRecordEnvironments.size())
		{
			currentRow++;
			recordEnvironment = bufferedRecordEnvironments.get(currentRow - 1);
			updateRecordEnvironment(true);
			return true;
		}
		else
		{
			boolean thereWasAnAnswer;
			if (maxRows != 0 && currentRow >= maxRows)
			{
				// Do not fetch any more rows, we have reached the row limit set by caller.
				thereWasAnAnswer = false;
			}
			else if (limit >= 0 && currentRow >= limit)
			{
				thereWasAnAnswer = false;
			}
			else if (hitTail)
			{
				thereWasAnAnswer = false;
			}
			else
			{
				thereWasAnAnswer = reader.next();
			}

			if (thereWasAnAnswer)
				recordEnvironment = reader.getEnvironment();
			else
				recordEnvironment = null;

			// We have a where clause or DISTINCT keyword, honor it
			if (whereClause != null || distinctValues != null)
			{
				Map<String, Object> objectEnvironment = updateRecordEnvironment(thereWasAnAnswer);
				while (thereWasAnAnswer)
				{
					if (whereClause == null || whereClause.isTrue(objectEnvironment))
					{
						/*
						 * Check HAVING clause if no aggregate functions in query and
						 * it is being processed just like SELECT DISTINCT.
						 * In this case HAVING is exactly the same as a WHERE clause.
						 */
						if (this.distinctColumns == null || this.havingClause == null || this.havingClause.isTrue(objectEnvironment))
						{
							if (distinctValues == null || addDistinctEnvironment(objectEnvironment))
							{
								break;
							}
						}
					}
					thereWasAnAnswer = reader.next();
					if (thereWasAnAnswer)
						recordEnvironment = reader.getEnvironment();
					else
						recordEnvironment = null;
					objectEnvironment = updateRecordEnvironment(thereWasAnAnswer);
				}
			}
			if (this.orderByColumns != null || this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE)
			{
				if (thereWasAnAnswer)
				{
					bufferedRecordEnvironments.add(reader.getEnvironment());
					currentRow++;
				}
				else
				{
					hitTail = true;
					currentRow = bufferedRecordEnvironments.size() + 1;
				}
			}
			else
			{
				if (thereWasAnAnswer)
					currentRow++;
				else
					hitTail = true;
			}
			return thereWasAnAnswer;
		}
	}

	/**
	 * Builds the map of upper-cased column name -> value for the current row,
	 * covering selected columns, clause-referenced columns, and the special
	 * "@GROUPROWS" / "@STRINGCONVERTER" entries.
	 */
	private Map<String, Object> updateRecordEnvironment(boolean thereWasAnAnswer)
	{
		HashMap<String, Object> objectEnvironment = new HashMap<String, Object>();
		if (!thereWasAnAnswer)
		{
			recordEnvironment = null;
			return objectEnvironment;
		}
		for (int i = 0; i < queryEnvironment.size(); i++)
		{
			Object[] o = queryEnvironment.get(i);
			String key = (String) o[0];
			Object value = ((Expression) o[1]).eval(recordEnvironment);
			objectEnvironment.put(key.toUpperCase(), value);
		}
		for (int i = 0; i < usedColumns.size(); i++)
		{
			String key = usedColumns.get(i);
			key = key.toUpperCase();
			if (!objectEnvironment.containsKey(key))
			{
				objectEnvironment.put(key, recordEnvironment.get(key));
			}
		}

		/*
		 * Always include any group of rows so we have assembled so we can evaluate
		 * any aggregate functions.
		 */
		String key = "@GROUPROWS";
		Object groupRows = recordEnvironment.get(key);
		if (groupRows != null)
			objectEnvironment.put(key, groupRows);

		/*
		 * Always include the data type converter object so we can correctly
		 * convert data types when evaluating expressions such as MYDATE > '2012-06-31'.
		 */
		key = "@STRINGCONVERTER";
		Object stringConverter = recordEnvironment.get(key);
		if (stringConverter != null)
			objectEnvironment.put(key, stringConverter);
		return objectEnvironment;
	}

	/**
	 * Records the current row's DISTINCT/GROUP BY value tuple; returns true if
	 * the tuple has not been seen before in this query.
	 */
	private boolean addDistinctEnvironment(Map<String, Object> objectEnvironment)
	{
		boolean isDistinct;

		/*
		 * Create list of query values for this row, either for a simple
		 * GROUP BY statement, or for a SELECT DISTINCT.
		 */
		ArrayList<Object> environment;
		if (this.distinctColumns != null)
		{
			environment = new ArrayList<Object>(distinctColumns.size());
			for (int i = 0; i < distinctColumns.size(); i++)
			{
				Object value = distinctColumns.get(i).eval(objectEnvironment);
				environment.add(value);
			}
		}
		else
		{
			environment = new ArrayList<Object>(queryEnvironment.size());
			for (int i = 0; i < queryEnvironment.size(); i++)
			{
				Object[] o = queryEnvironment.get(i);
				Object value = ((Expression) o[1]).eval(objectEnvironment);
				environment.add(value);
			}
		}

		/*
		 * Has this list of values been read before for this query?
		 */
		isDistinct = distinctValues.add(environment);
		return isDistinct;
	}

	@Override
	public void close() throws SQLException
	{
		isClosed = true;
		reader.close();
	}

	@Override
	public boolean wasNull() throws SQLException
	{
		if (lastIndexRead >= 0)
		{
			return getString(lastIndexRead) == null;
		}
		else
		{
			throw new SQLException("No previous getter method called");
		}
	}

	// Methods for accessing results by column index

	@Override
	public String getString(int columnIndex) throws SQLException
	{
		// perform pre-accessor method processing
		preAccessor(columnIndex);
		Object[] o = queryEnvironment.get(columnIndex - 1);
		try
		{
			return ((Expression) o[1]).eval(recordEnvironment).toString();
		}
		catch (NullPointerException e)
		{
			// expression evaluated to null: SQL NULL
			return null;
		}
	}

	@Override
	public boolean getBoolean(int columnIndex) throws SQLException
	{
		return converter.parseBoolean(getString(columnIndex));
	}

	@Override
	public byte getByte(int columnIndex) throws SQLException
	{
		return converter.parseByte(getString(columnIndex));
	}

	@Override
	public short getShort(int columnIndex) throws SQLException
	{
		return converter.parseShort(getString(columnIndex));
	}

	@Override
	public int getInt(int columnIndex) throws SQLException
	{
		return converter.parseInt(getString(columnIndex));
	}

	@Override
	public long getLong(int columnIndex) throws SQLException
	{
		return converter.parseLong(getString(columnIndex));
	}

	@Override
	public float getFloat(int columnIndex) throws SQLException
	{
		return converter.parseFloat(getString(columnIndex));
	}

	@Override
	public double getDouble(int columnIndex) throws SQLException
	{
		return converter.parseDouble(getString(columnIndex));
	}

	@Override
	@Deprecated
	public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException
	{
		// let getBigDecimal(int) handle this for now
		return getBigDecimal(columnIndex);
	}

	@Override
	public byte[] getBytes(int columnIndex) throws SQLException
	{
		return converter.parseBytes(getString(columnIndex));
	}

	@Override
	public Date getDate(int columnIndex) throws SQLException
	{
		return (Date) getObject(columnIndex);
	}

	@Override
	public Time getTime(int columnIndex) throws SQLException
	{
		return (Time) getObject(columnIndex);
	}

	@Override
	public Timestamp getTimestamp(int columnIndex) throws SQLException
	{
		return (Timestamp) getObject(columnIndex);
	}

	@Override
	public InputStream getAsciiStream(int columnIndex) throws SQLException
	{
		return converter.parseAsciiStream(getString(columnIndex));
	}

	@Override
	@Deprecated
	public InputStream getUnicodeStream(int columnIndex) throws SQLException
	{
		// delegate to getAsciiStream(int)
		return getAsciiStream(columnIndex);
	}

	@Override
	public InputStream getBinaryStream(int columnIndex) throws SQLException
	{
		// delegate to getAsciiStream(int)
		return getAsciiStream(columnIndex);
	}

	// Methods for accessing results by column name

	@Override
	public String getString(String columnName) throws SQLException
	{
		return getString(findColumn(columnName));
	}

	@Override
	public boolean getBoolean(String columnName) throws SQLException
	{
		return getBoolean(findColumn(columnName));
	}

	@Override
	public byte getByte(String columnName) throws SQLException
	{
		return getByte(findColumn(columnName));
	}

	@Override
	public short getShort(String columnName) throws SQLException
	{
		return getShort(findColumn(columnName));
	}

	@Override
	public int getInt(String columnName) throws SQLException
	{
		return getInt(findColumn(columnName));
	}

	@Override
	public long getLong(String columnName) throws SQLException
	{
		return getLong(findColumn(columnName));
	}

	@Override
	public float getFloat(String columnName) throws SQLException
	{
		return getFloat(findColumn(columnName));
	}

	@Override
	public double getDouble(String columnName) throws SQLException
	{
		return getDouble(findColumn(columnName));
	}

	@Override
	@Deprecated
	public BigDecimal getBigDecimal(String columnName, int scale) throws SQLException
	{
		return getBigDecimal(findColumn(columnName));
	}

	@Override
	public byte[] getBytes(String columnName) throws SQLException
	{
		return getBytes(findColumn(columnName));
	}

	@Override
	public Date getDate(String columnName) throws SQLException
	{
		return getDate(findColumn(columnName));
	}

	@Override
	public Time getTime(String columnName) throws SQLException
	{
		return getTime(findColumn(columnName));
	}

	@Override
	public Timestamp getTimestamp(String columnName) throws SQLException
	{
		return getTimestamp(findColumn(columnName));
	}

	@Override
	public InputStream getAsciiStream(String columnName) throws SQLException
	{
		return getAsciiStream(findColumn(columnName));
	}

	@Override
	@Deprecated
	public InputStream getUnicodeStream(String columnName) throws SQLException
	{
		return getUnicodeStream(findColumn(columnName));
	}

	@Override
	public InputStream getBinaryStream(String columnName) throws SQLException
	{
		return getBinaryStream(findColumn(columnName));
	}

	// Advanced features:

	@Override
	public SQLWarning getWarnings() throws SQLException
	{
		return null;
	}

	@Override
	public void clearWarnings() throws SQLException
	{
	}

	@Override
	public String getCursorName() throws SQLException
	{
		throw new UnsupportedOperationException(
			"ResultSet.getCursorName() unsupported");
	}

	/**
	 * Lazily builds metadata for this result: for each selected expression it
	 * evaluates the expression against a dummy record to discover its type.
	 */
	@Override
	public ResultSetMetaData getMetaData() throws SQLException
	{
		if (resultSetMetaData == null)
		{
			String[] readerTypeNames = reader.getColumnTypes();
			String[] readerColumnNames = reader.getColumnNames();
			int[] readerColumnSizes = reader.getColumnSizes();
			String tableAlias = reader.getTableAlias();
			int columnCount = queryEnvironment.size();
			String []columnNames = new String[columnCount];
			String []columnLabels = new String[columnCount];
			int []columnSizes = new int[columnCount];
			String []typeNames = new String[columnCount];

			/*
			 * Create a record containing dummy values.
			 */
			HashMap<String, Object> env = new HashMap<String, Object>();
			for (int i = 0; i < readerTypeNames.length; i++)
			{
				Object literal = StringConverter.getLiteralForTypeName(readerTypeNames[i]);
				String columnName = readerColumnNames[i].toUpperCase();
				env.put(columnName, literal);
				if (tableAlias != null)
					env.put(tableAlias + "." + columnName, literal);
			}
			if (converter != null)
				env.put("@STRINGCONVERTER", converter);

			for (int i = 0; i < columnCount; i++)
			{
				Object[] o = queryEnvironment.get(i);
				columnNames[i] = (String)o[0];
				columnLabels[i] = columnNames[i];

				/*
				 * Evaluate each expression to determine what data type it returns.
				 */
				Object result = null;
				try
				{
					Expression expr = ((Expression)o[1]);

					int columnSize = DataReader.DEFAULT_COLUMN_SIZE;
					if (expr instanceof ColumnName)
					{
						String usedColumn = expr.usedColumns().get(0);
						for (int k = 0; k < readerColumnNames.length; k++)
						{
							if (usedColumn.equalsIgnoreCase(readerColumnNames[k]))
							{
								columnSize = readerColumnSizes[k];
								break;
							}
						}
					}
					columnSizes[i] = columnSize;
					result = expr.eval(env);
				}
				catch (NullPointerException e)
				{
					/* Expression is invalid */
					// TODO: should we throw an SQLException here?
				}
				if (result != null)
					typeNames[i] = StringConverter.getTypeNameForLiteral(result);
				else
					typeNames[i] = "expression";
			}
			resultSetMetaData = new CsvResultSetMetaData(tableName, columnNames, columnLabels, typeNames,
				columnSizes);
		}
		return resultSetMetaData;
	}

	@Override
	public Object getObject(int columnIndex) throws SQLException
	{
		// perform pre-accessor method processing
		preAccessor(columnIndex);
		Object[] o = queryEnvironment.get(columnIndex - 1);
		try
		{
			return ((Expression) o[1]).eval(recordEnvironment);
		}
		catch (NullPointerException e)
		{
			return null;
		}
	}

	@Override
	public Object getObject(String columnName) throws SQLException
	{
		return getObject(findColumn(columnName));
	}

	// Getters and Setters

	@Override
	public Reader getCharacterStream(int columnIndex) throws SQLException
	{
		String str = getString(columnIndex);
		return (str == null) ? null : new StringReader(str);
	}

	@Override
	public Reader getCharacterStream(String columnName) throws SQLException
	{
		String str = getString(columnName);
		return (str == null) ?
null : new StringReader(str); } @Override public BigDecimal getBigDecimal(int columnIndex) throws SQLException { BigDecimal retval = null; String str = getString(columnIndex); if(str != null) { try { retval = new BigDecimal(str); } catch (NumberFormatException e) { throw new SQLException("Could not convert '" + str + "' to " + "a java.math.BigDecimal object"); } } return retval; } @Override public BigDecimal getBigDecimal(String columnName) throws SQLException { return getBigDecimal(findColumn(columnName)); } // Traversal/Positioning @Override public boolean isBeforeFirst() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { return currentRow == 0; } else { throw new UnsupportedOperationException( "ResultSet.isBeforeFirst() unsupported"); } } @Override public boolean isAfterLast() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { return currentRow == bufferedRecordEnvironments.size() + 1; } else { throw new UnsupportedOperationException( "ResultSet.isAfterLast() unsupported"); } } @Override public boolean isFirst() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { return currentRow == 1; } else { throw new UnsupportedOperationException( "ResultSet.isFirst() unsupported"); } } @Override public boolean isLast() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { if (!hitTail && currentRow != 0) { next(); previous(); } return (currentRow == bufferedRecordEnvironments.size()); } else { throw new UnsupportedOperationException( "ResultSet.isLast() unsupported"); } } @Override public void beforeFirst() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { first(); previous(); } else { throw new UnsupportedOperationException( "ResultSet.beforeFirst() unsupported"); } } @Override public void afterLast() throws SQLException { checkOpen(); if (this.isScrollable 
== ResultSet.TYPE_SCROLL_SENSITIVE) { while(next()); } else { throw new UnsupportedOperationException( "ResultSet.afterLast() unsupported"); } } @Override public boolean first() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { currentRow = 0; boolean thereWasAnAnswer = next(); updateRecordEnvironment(thereWasAnAnswer); return thereWasAnAnswer; } else { throw new UnsupportedOperationException( "ResultSet.first() unsupported"); } } @Override public boolean last() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { afterLast(); previous(); return (this.bufferedRecordEnvironments.size() != 0); } else { throw new UnsupportedOperationException("ResultSet.last() unsupported"); } } @Override public int getRow() throws SQLException { if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { return currentRow; } else { throw new UnsupportedOperationException( "ResultSet.getRow() unsupported"); } } @Override public boolean absolute(int row) throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { boolean found; if(row < 0) { last(); row = currentRow + row + 1; } else { // this is a no-op if we have already buffered enough lines. 
while((bufferedRecordEnvironments.size() < row) && next()); } if (row <= 0) { found = false; currentRow = 0; } else if(row > bufferedRecordEnvironments.size()) { found = false; currentRow = bufferedRecordEnvironments.size() + 1; } else { found = true; currentRow = row; recordEnvironment = bufferedRecordEnvironments.get(currentRow - 1); } updateRecordEnvironment(found); return found; } else { throw new UnsupportedOperationException( "ResultSet.absolute() unsupported"); } } @Override public boolean relative(int rows) throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { if(currentRow + rows >= 0) return absolute(currentRow + rows); currentRow = 0; updateRecordEnvironment(false); return false; } else { throw new UnsupportedOperationException( "ResultSet.relative() unsupported"); } } @Override public boolean previous() throws SQLException { checkOpen(); if (this.isScrollable == ResultSet.TYPE_SCROLL_SENSITIVE) { if(currentRow > 1) { currentRow recordEnvironment = bufferedRecordEnvironments.get(currentRow - 1); updateRecordEnvironment(true); return true; } else { currentRow = 0; recordEnvironment = null; updateRecordEnvironment(false); return false; } } else { throw new UnsupportedOperationException( "ResultSet.previous() unsupported"); } } // Properties @Override public void setFetchDirection(int direction) throws SQLException { throw new UnsupportedOperationException( "ResultSet.setFetchDirection(int) unsupported"); } @Override public int getFetchDirection() throws SQLException { throw new UnsupportedOperationException( "ResultSet.getFetchDirection() unsupported"); } @Override public void setFetchSize(int rows) throws SQLException { fetchSize = rows; } @Override public int getFetchSize() throws SQLException { return fetchSize; } @Override public int getType() throws SQLException { return isScrollable; } @Override public int getConcurrency() throws SQLException { return CONCUR_READ_ONLY; } // Updates @Override public boolean 
rowUpdated() throws SQLException { throw new UnsupportedOperationException( "ResultSet.rowUpdated() unsupported"); } @Override public boolean rowInserted() throws SQLException { throw new UnsupportedOperationException( "ResultSet.rowInserted() unsupported"); } @Override public boolean rowDeleted() throws SQLException { throw new UnsupportedOperationException( "ResultSet.rowDeleted() unsupported"); } @Override public void updateNull(int columnIndex) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateNull() unsupported"); } @Override public void updateBoolean(int columnIndex, boolean x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateBoolean() unsupported"); } @Override public void updateByte(int columnIndex, byte x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateByte() unsupported"); } @Override public void updateShort(int columnIndex, short x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateShort() unsupported"); } @Override public void updateInt(int columnIndex, int x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateInt() unsupported"); } @Override public void updateLong(int columnIndex, long x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateLong(int, long) unsupported"); } @Override public void updateFloat(int columnIndex, float x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateFloat(int, float) unsupported"); } @Override public void updateDouble(int columnIndex, double x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateDouble(int, double) unsupported"); } @Override public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateBigDecimal(int, BigDecimal) unsupported"); } @Override public void updateString(int columnIndex, String x) 
throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateString(int, String) unsupported"); } @Override public void updateBytes(int columnIndex, byte[] x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateBytes(int, byte[]) unsupported"); } @Override public void updateDate(int columnIndex, Date x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateDate(int, Date) unsupported"); } @Override public void updateTime(int columnIndex, Time x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateTime(int, Time) unsupported"); } @Override public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateTimestamp(int, Timestamp) unsupported"); } @Override public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateAsciiStream " + "(int, InputStream, int) unsupported"); } @Override public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateBinaryStream" + "(int, InputStream, int) unsupported"); } @Override public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateCharacterStr" + "eam(int, Reader, int) unsupported"); } @Override public void updateObject(int columnIndex, Object x, int scale) throws SQLException { throw new UnsupportedOperationException( "ResultSet.udpateObject(int, Object) unsupported"); } @Override public void updateObject(int columnIndex, Object x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateObject(int, Object, int) unsupported"); } @Override public void updateNull(String columnName) throws SQLException { throw new UnsupportedOperationException( 
"ResultSet.updateNull(String) unsupported"); } @Override public void updateBoolean(String columnName, boolean x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateBoolean(String, boolean) unsupported"); } @Override public void updateByte(String columnName, byte x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateByte(String, byte) unsupported"); } @Override public void updateShort(String columnName, short x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateShort(String, short) unsupported"); } @Override public void updateInt(String columnName, int x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateInt(String, int) unsupported"); } @Override public void updateLong(String columnName, long x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateLong(String, long) unsupported"); } @Override public void updateFloat(String columnName, float x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateFloat(String, float) unsupported"); } @Override public void updateDouble(String columnName, double x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateDouble(String, double) unsupported"); } @Override public void updateBigDecimal(String columnName, BigDecimal x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateBigDecimal(String, BigDecimal) unsupported"); } @Override public void updateString(String columnName, String x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateString(String, String) unsupported"); } @Override public void updateBytes(String columnName, byte[] x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateBytes(String, byte[]) unsupported"); } @Override public void updateDate(String columnName, Date x) throws SQLException { throw new UnsupportedOperationException( 
"ResultSet.updateDate(String, Date) unsupported"); } @Override public void updateTime(String columnName, Time x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateTime(String, Time) unsupported"); } @Override public void updateTimestamp(String columnName, Timestamp x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateTimestamp(String, Timestamp) unsupported"); } @Override public void updateAsciiStream(String columnName, InputStream x, int length) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateAsciiStream" + "(String, InputStream, int) unsupported"); } @Override public void updateBinaryStream(String columnName, InputStream x, int length) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateBinaryStream" + "(String, InputStream, int) unsupported"); } @Override public void updateCharacterStream(String columnName, Reader reader, int length) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateCharacterStr" + "eam(String, Reader, int) unsupported"); } @Override public void updateObject(String columnName, Object x, int scale) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateObject(String, Object, int) unsupported"); } @Override public void updateObject(String columnName, Object x) throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateObject(String, Object) unsupported"); } @Override public void insertRow() throws SQLException { throw new UnsupportedOperationException( "ResultSet.insertRow() unsupported"); } @Override public void updateRow() throws SQLException { throw new UnsupportedOperationException( "ResultSet.updateRow() unsupported"); } @Override public void deleteRow() throws SQLException { throw new UnsupportedOperationException( "ResultSet.deleteRow() unsupported"); } @Override public void refreshRow() throws SQLException { throw new UnsupportedOperationException( 
"ResultSet.refreshRow() unsupported"); } @Override public void cancelRowUpdates() throws SQLException { throw new UnsupportedOperationException( "ResultSet.cancelRowUpdates() unsupported"); } @Override public void moveToInsertRow() throws SQLException { throw new UnsupportedOperationException( "ResultSet.moveToInsertRow() unsupported"); } @Override public void moveToCurrentRow() throws SQLException { throw new UnsupportedOperationException( "ResultSet.moveToeCurrentRow() unsupported"); } @Override public Statement getStatement() throws SQLException { return statement; } @Override public Object getObject(int i, Map<String,Class<?>> map) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getObject(int, Map) unsupported"); } @Override public Ref getRef(int i) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getRef(int) unsupported"); } @Override public Blob getBlob(int i) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getBlob(int) unsupported"); } @Override public Clob getClob(int i) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getClob(int) unsupported"); } @Override public Array getArray(int i) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getArray(int) unsupported"); } @Override public Object getObject(String colName, Map<String,Class<?>> map) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getObject(String, Map) unsupported"); } @Override public Ref getRef(String colName) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getRef(String) unsupported"); } @Override public Blob getBlob(String colName) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getBlob(String) unsupported"); } @Override public Clob getClob(String colName) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getClob(String) unsupported"); } @Override public Array 
getArray(String colName) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getArray(String) unsupported"); } @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getDate(int, Calendar) unsupported"); } @Override public Date getDate(String columnName, Calendar cal) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getDate(String, Calendar) unsupported"); } @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getTime(int, Calendar) unsupported"); } @Override public Time getTime(String columnName, Calendar cal) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getTime(String, Calendar) unsupported"); } @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getTimestamp(int, Calendar) unsupported"); } @Override public Timestamp getTimestamp(String columnName, Calendar cal) throws SQLException { throw new UnsupportedOperationException( "ResultSet.getTimestamp(String, Calendar) unsupported"); } // CSV JDBC private helper methods /** * Perform pre-accessor method processing * @param columnIndex the first column is 1, the second is 2, ... 
 * @exception SQLException if a database access error occurs */
private void preAccessor(int columnIndex) throws SQLException {
    // set last read column index for wasNull()
    lastIndexRead = columnIndex;
    // valid indices are 1..queryEnvironment.size() (JDBC is 1-based)
    if (columnIndex < 1 || columnIndex > this.queryEnvironment.size()) {
        throw new SQLException("Column not found: invalid index: "+columnIndex);
    }
}

// Unsupported advanced-type accessors/mutators: all throw.
@Override public URL getURL(int columnIndex) throws SQLException { throw new UnsupportedOperationException("ResultSet.getURL(int) unsupported"); }
@Override public URL getURL(String columnName) throws SQLException { throw new UnsupportedOperationException("ResultSet.getURL(String) unsupported"); }
@Override public void updateRef(int columnIndex, Ref x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateRef(int,java.sql.Ref) unsupported"); }
@Override public void updateRef(String columnName, Ref x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateRef(String,java.sql.Ref) unsupported"); }
@Override public void updateBlob(int columnIndex, Blob x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateBlob(int,java.sql.Blob) unsupported"); }
@Override public void updateBlob(String columnName, Blob x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateBlob(String,java.sql.Blob) unsupported"); }
@Override public void updateClob(int columnIndex, Clob x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateClob(int,java.sql.Clob) unsupported"); }
@Override public void updateClob(String columnName, Clob x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateClob(String,java.sql.Clob) unsupported"); }
@Override public void updateArray(int columnIndex, Array x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateArray(int,java.sql.Array) unsupported"); }
@Override public void updateArray(String columnName, Array x) throws SQLException { throw new UnsupportedOperationException("ResultSet.updateArray(String,java.sql.Array) unsupported"); }

// JDBC 4 auto-generated stubs below. NOTE(review): these silently no-op or
// return placeholder values (0/null/false) instead of throwing like the
// older stubs above — inconsistent, but preserved as-is.
@Override public int getHoldability() throws SQLException {
    // TODO Auto-generated method stub
    return 0;
}
@Override public Reader getNCharacterStream(int columnIndex) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public Reader getNCharacterStream(String columnLabel) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public String getNString(int columnIndex) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public String getNString(String columnLabel) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public boolean isClosed() throws SQLException { return isClosed; }
@Override public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateClob(int columnIndex, Reader reader) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateClob(String columnLabel, Reader reader) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNClob(int columnIndex, Reader reader) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNClob(String columnLabel, Reader reader) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNString(int columnIndex, String string) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNString(String columnLabel, String string) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public boolean isWrapperFor(Class<?> arg0) throws SQLException {
    // TODO Auto-generated method stub
    return false;
}
@Override public <T> T unwrap(Class<T> arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}

/**
 * Maps a column label to its 1-based index via a case-insensitive scan of
 * the query environment.
 * @exception SQLException if the label is empty or no column matches
 */
@Override public int findColumn(String columnLabel) throws SQLException {
    checkOpen();
    if (columnLabel.equals(""))
        throw new SQLException("Can't access columns with empty name by name");
    for (int i = 0; i < this.queryEnvironment.size(); i++) {
        Object[] queryEnvEntry = this.queryEnvironment.get(i);
        // queryEnvEntry[0] holds the column name/label
        if(((String)queryEnvEntry[0]).equalsIgnoreCase(columnLabel))
            return i+1;
    }
    throw new SQLException("Column not found: " + columnLabel);
}

@Override public NClob getNClob(int arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public NClob getNClob(String arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public RowId getRowId(int arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public RowId getRowId(String arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public SQLXML getSQLXML(int arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public SQLXML getSQLXML(String arg0) throws SQLException {
    // TODO Auto-generated method stub
    return null;
}
@Override public void updateNClob(int arg0, NClob arg1) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateNClob(String arg0, NClob arg1) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateRowId(int arg0, RowId arg1) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateRowId(String arg0, RowId arg1) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateSQLXML(int arg0, SQLXML arg1) throws SQLException {
    // TODO Auto-generated method stub
}
@Override public void updateSQLXML(String arg0, SQLXML arg1) throws SQLException {
    // TODO Auto-generated method stub
}

// JDBC 4.1 typed getters — not supported. NOTE(review): no @Override here
// in the original; preserved as-is.
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
    throw new UnsupportedOperationException("ResultSet.getObject(String, Class<T>) not supported");
}
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
    throw new UnsupportedOperationException("ResultSet.getObject(int, Class<T>) not supported");
}
}
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:20-07-30"); this.setApiVersion("16.7.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:21-10-23"); this.setApiVersion("17.12.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:21-09-04"); this.setApiVersion("17.5.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:21-12-18"); this.setApiVersion("17.16.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
package com.tr.maze; import java.util.ArrayDeque; import java.util.Deque; import java.util.HashSet; import java.util.Optional; import java.util.Set; /** * Applies the DFS algirithm to solve the Maze. Uses stack data structure to keep a track of visited * blocks. Traverses the aisles recursively until reaches finish/ doesn't find a path to finish * * @author Narain Mittal * */ public class MazeSolverDfs implements IMazeSolver { private Maze maze; private final Deque<Block> stack = new ArrayDeque<>(); private final Set<Block> visitedBlocks = new HashSet<>(); private final Set<Block> solutionBlocks = new HashSet<>(); @Override public boolean solve(Maze maze) { this.maze = maze; this.visitedBlocks.add(maze.getStart()); this.solutionBlocks.add(maze.getStart()); this.stack.push(maze.getStart()); return solveDfs(); } @Override public Block getNextTraversableAisle(Block block) { int x = block.getX(); int y = block.getY(); Block next = null; if (isValidAisle(x + 1, y)) { next = maze.getBlock(x + 1, y); } else if (isValidAisle(x, y + 1)) { next = maze.getBlock(x, y + 1); } else if (isValidAisle(x, y - 1)) { next = maze.getBlock(x, y - 1); } else if (isValidAisle(x - 1, y)) { next = maze.getBlock(x - 1, y); } return next; } /** * Checks if the block at designated indices is an Aisle which is not visited yet. 
* * @param x * @param y * @return true if valid, false otherwise */ private boolean isValidAisle(int x, int y) { try { Block b = maze.getBlock(x, y); return (b instanceof Aisle) && !isBlockVisited(b); } catch (ArrayIndexOutOfBoundsException e) { return false; } } private boolean solveDfs() { Optional<Block> block = Optional.ofNullable(stack.peekFirst()); if (!block.isPresent()) { // stack empty and not reached the finish yet; no solution return false; } else if (block.get().equals(maze.getEnd())) { // reached finish, exit the program return true; } else { Optional<Block> next = block.map(this::getNextTraversableAisle); if (!next.isPresent()) { // Dead end, backtrack and chose alternate path backTrack(); } else { // Traverse next block traverseNextBlock(next.get()); } } return solveDfs(); } private Optional backTrack(){ this.solutionBlocks.remove(stack.pop()); return Optional.empty(); } private void traverseNextBlock(Block next){ this.solutionBlocks.add(next); this.visitedBlocks.add(next); stack.push(next); } @Override public boolean isBlockInSolution(Block block) { return solutionBlocks.contains(block); } @Override public boolean isBlockVisited(Block block) { return visitedBlocks.contains(block); } }
package controller; import java.io.File; import java.io.IOException; import java.util.List; import java.util.Optional; import javafx.fxml.FXML; import javafx.event.ActionEvent; import javafx.scene.control.Alert; import javafx.scene.control.SelectionMode; import javafx.scene.control.TableView; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.util.Pair; import persistence.CsvPersistenceManager; import model.credentials.CredentialsEntity; import view.dialog.EntityDialog; public class HomeController extends Controller{ @FXML private TableView<CredentialsEntity> tableView; private File passwordsCSV = new File("passwords.csv"); private CsvPersistenceManager csvPersistenceManager = new CsvPersistenceManager(passwordsCSV); private ObservableList<CredentialsEntity> credentials; public HomeController(){ try { credentials = FXCollections.observableArrayList(csvPersistenceManager.loadCredentials()); } catch (IOException e) { e.printStackTrace(); } } @FXML public void initialize(){ tableView.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE); tableView.setItems(credentials); } @FXML protected void handleAddButtonAction(ActionEvent event){ EntityDialog addPasswordDialog = new EntityDialog(authenticator, EntityDialog.ADD_PASSWORD_TITLE, ""); Optional<Pair<Boolean, CredentialsEntity>> result = addPasswordDialog.showAndWait(); while (result.isPresent()) { if(result.get().getKey()) { credentials.add(result.get().getValue()); saveCredentials(credentials); break; } else { CredentialsEntity credentialsEntity = result.get().getValue(); addPasswordDialog = new EntityDialog( authenticator, credentialsEntity.getPlace(), credentialsEntity.getUsername(), credentialsEntity.getNote(), EntityDialog.ADD_PASSWORD_TITLE, "Main password incorrect"); result = addPasswordDialog.showAndWait(); } } } @FXML protected void handleDeleteButtonAction(ActionEvent event){ ObservableList<CredentialsEntity> credentialsEntities = 
tableView.getSelectionModel().getSelectedItems(); credentials.removeAll(credentialsEntities); saveCredentials(credentials); } @FXML protected void handleEditButtonAction(ActionEvent event){ ObservableList<CredentialsEntity> credentialsEntities = tableView.getSelectionModel().getSelectedItems(); if(credentialsEntities.size() == 1){ CredentialsEntity currentEntity = credentialsEntities.get(0); EntityDialog entityDialog = new EntityDialog( authenticator, currentEntity.getPlace(), currentEntity.getUsername(), currentEntity.getNote(), EntityDialog.EDIT_PASSWORD_TITLE, ""); Optional<Pair<Boolean, CredentialsEntity>> result = entityDialog.showAndWait(); while (result.isPresent()) { if(result.get().getKey()) { credentials.set(credentials.indexOf(currentEntity), result.get().getValue()); saveCredentials(credentials); break; } else { CredentialsEntity credentialsEntity = result.get().getValue(); entityDialog = new EntityDialog( authenticator, credentialsEntity.getPlace(), credentialsEntity.getUsername(), credentialsEntity.getNote(), EntityDialog.EDIT_PASSWORD_TITLE, "Main password incorrect"); result = entityDialog.showAndWait(); } } } else{ Alert alert = new Alert(Alert.AlertType.INFORMATION); alert.setTitle("Information"); alert.setHeaderText(null); alert.setContentText("You can edit only one item at a time."); alert.showAndWait(); } } private void saveCredentials(List credentials){ try { csvPersistenceManager.saveCredentials(credentials); } catch (IOException e) { e.printStackTrace(); } } }
package de.prob2.ui.config;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.inject.Inject;
import com.google.inject.Singleton;

import de.prob.Main;
import de.prob.model.representation.AbstractElement;
import de.prob2.ui.menu.RecentFiles;
import de.prob2.ui.states.ClassBlacklist;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads and saves the UI configuration (recent files, hidden state-view classes)
 * as JSON under the ProB home directory. Missing keys are filled from a bundled
 * default config.
 */
@Singleton
@SuppressWarnings("AccessingNonPublicFieldOfAnotherObject")
public final class Config {
	// Plain data holder mirroring the JSON file structure; populated by Gson.
	private static final class ConfigData {
		private int maxRecentFiles;
		private List<String> recentFiles;
		private List<String> statesViewHiddenClasses;
	}

	// User config file under the ProB directory.
	private static final File LOCATION = new File(Main.getProBDirectory() + File.separator + "prob2ui" + File.separator + "config.json");
	// Bundled default config, resolved from the classpath resource.
	// NOTE(review): File-from-resource-URI only works when the resource is on the
	// filesystem (not inside a jar) — TODO confirm packaging.
	private static final File DEFAULT;
	static {
		try {
			DEFAULT = new File(Config.class.getResource("default.json").toURI());
		} catch (URISyntaxException e) {
			throw new IllegalStateException(e);
		}
	}
	private static final Logger logger = LoggerFactory.getLogger(Config.class);

	private final Gson gson;
	private final ClassBlacklist classBlacklist;
	private final RecentFiles recentFiles;
	// Parsed defaults, read once in the constructor; used to fill missing keys.
	private final ConfigData defaultData;

	/**
	 * Reads the default config (fail-fast: a missing/unreadable default is a
	 * packaging error), ensures the config directory exists, then loads the
	 * user config.
	 */
	@Inject
	private Config(final ClassBlacklist classBlacklist, final RecentFiles recentFiles) {
		this.gson = new GsonBuilder().setPrettyPrinting().create();
		this.classBlacklist = classBlacklist;
		this.recentFiles = recentFiles;

		try (final Reader defaultReader = new InputStreamReader(new FileInputStream(DEFAULT), "UTF-8")) {
			this.defaultData = gson.fromJson(defaultReader, ConfigData.class);
		} catch (FileNotFoundException exc) {
			throw new IllegalStateException("Default config file not found", exc);
		} catch (IOException exc) {
			throw new IllegalStateException("Failed to open default config file", exc);
		}

		if (!LOCATION.getParentFile().exists() && !LOCATION.getParentFile().mkdirs()) {
			logger.warn("Failed to create the parent directory for the config file {}", LOCATION.getAbsolutePath());
		}

		this.load();
	}

	/**
	 * Loads the user config from disk and applies it to the injected
	 * collaborators (recent-files list and states-view class blacklist).
	 * Falls back to the defaults when the file does not exist yet.
	 */
	public void load() {
		ConfigData configData;
		try (final Reader reader = new InputStreamReader(new FileInputStream(LOCATION), "UTF-8")) {
			configData = gson.fromJson(reader, ConfigData.class);
		} catch (FileNotFoundException ignored) { // NOSONAR
			// Config file doesn't exist yet, use the defaults
			configData = this.defaultData;
		} catch (IOException exc) {
			logger.warn("Failed to open config file", exc);
			return;
		}

		// If some keys are null (for example when loading a config from a previous version that did not have those keys), replace them with their values from the default config.
		if (configData.recentFiles == null) {
			configData.maxRecentFiles = this.defaultData.maxRecentFiles;
			configData.recentFiles = new ArrayList<>(this.defaultData.recentFiles);
		}
		if (configData.statesViewHiddenClasses == null) {
			configData.statesViewHiddenClasses = new ArrayList<>(this.defaultData.statesViewHiddenClasses);
		}

		this.recentFiles.setMaximum(configData.maxRecentFiles);
		this.recentFiles.setAll(configData.recentFiles);

		// Resolve persisted class names back into Class objects; skip (with a
		// warning) anything that no longer exists or is not an AbstractElement.
		for (String name : configData.statesViewHiddenClasses) {
			Class<? extends AbstractElement> clazz;
			try {
				clazz = Class.forName(name).asSubclass(AbstractElement.class);
			} catch (ClassNotFoundException exc) {
				logger.warn("Class not found, cannot add to states view blacklist", exc);
				continue;
			} catch (ClassCastException exc) {
				logger.warn("Class is not a subclass of AbstractElement, cannot add to states view blacklist", exc);
				continue;
			}
			classBlacklist.getKnownClasses().add(clazz);
			classBlacklist.getBlacklist().add(clazz);
		}
	}

	/**
	 * Serializes the current state of the collaborators back to the config
	 * file. Write failures are logged, not thrown.
	 */
	public void save() {
		final ConfigData configData = new ConfigData();
		configData.maxRecentFiles = this.recentFiles.getMaximum();
		configData.recentFiles = new ArrayList<>(this.recentFiles);
		configData.statesViewHiddenClasses = new ArrayList<>();
		for (Class<? extends AbstractElement> clazz : classBlacklist.getBlacklist()) {
			// Persist by canonical name so load() can resolve via Class.forName.
			configData.statesViewHiddenClasses.add(clazz.getCanonicalName());
		}

		try (final Writer writer = new OutputStreamWriter(new FileOutputStream(LOCATION), "UTF-8")) {
			gson.toJson(configData, writer);
		} catch (FileNotFoundException exc) {
			logger.warn("Failed to create config file", exc);
		} catch (IOException exc) {
			logger.warn("Failed to save config file", exc);
		}
	}
}
package fr.cs.orekit.frames;

import java.io.Serializable;
import java.util.HashMap;
import java.util.LinkedList;

import org.apache.commons.math.geometry.Rotation;

import fr.cs.orekit.errors.FrameAncestorException;
import fr.cs.orekit.errors.OrekitException;
import fr.cs.orekit.time.AbsoluteDate;

/**
 * Node of the reference-frame tree. Every frame (except the J2000 root) stores
 * the transform from its parent; transforms between arbitrary frames are built
 * by walking both paths to their deepest common ancestor. Singleton reference
 * frames are created lazily via the initialization-on-demand holder idiom.
 */
public class Frame implements Serializable {

    /** International Terrestrial Reference Frame 2000 A.
     * <p> Replaces the old ECEF representation. <p> */
    public static final FrameType ITRF2000A = new FrameType("ITRF2000A");

    /** International Terrestrial Reference Frame 2000 B.
     * <p> Replaces the old ECEF representation. <p> */
    public static final FrameType ITRF2000B = new FrameType("ITRF2000B");

    /** Intermediate Reference Frame 2000 A : true equinox and equator of date.
     * <p> Precession and nutation effects with maximal precision and no
     * earth rotation. <p> */
    public static final FrameType IRF2000A = new FrameType("IRF2000A");

    /** Intermediate Reference Frame 2000 B : true equinox and equator of date.
     * <p> Precession and nutation effects with less precision and no
     * earth rotation. <p> */
    public static final FrameType IRF2000B = new FrameType("IRF2000B");

    /** Terrestrial Intermediate Reference Frame 2000 A.
     * <p> The pole motion is not considered.</p> */
    public static final FrameType TIRF2000A = new FrameType("TIRF2000A");

    /** Terrestrial Intermediate Reference Frame 2000 B.
     * <p> The pole motion is not considered.</p> */
    public static final FrameType TIRF2000B = new FrameType("TIRF2000B");

    /** Veis 1950 frame.
     * <p>This frame is sometimes referred to as
     * <em>&gamma;<sub>50</sub> CNES</em></p> */
    public static final FrameType VEIS1950 = new FrameType("VEIS1950");

    /** Serialiazable UID. */
    private static final long serialVersionUID = 2071889292905823128L;

    /** parent frame (only J2000 doesn't have a parent). */
    private final Frame parent;

    /** Transform from parent frame to instance. */
    private Transform transform;

    /** Map of deepest frames commons with other frames.
     * Acts as a cache for findCommon(); raw HashMap keyed by Frame, valued by Frame. */
    private final HashMap commons;

    /** Instance name. */
    private final String name;

    /** Private constructor used only for the J2000 root frame.
     * @param name name of the frame
     */
    private Frame(String name) {
        parent = null;
        transform = new Transform();
        commons = new HashMap();
        this.name = name;
    }

    /** Build a frame attached to a parent.
     * @param parent parent frame (must not be null)
     * @param transform transform from parent frame to instance
     * @param name name of the frame
     * @exception IllegalArgumentException if parent is null
     * NOTE(review): relies on OrekitException.throwIllegalArgumentException
     * always throwing; if it ever returned normally, construction would
     * continue with a null parent — confirm against OrekitException.
     */
    public Frame(Frame parent, Transform transform, String name)
        throws IllegalArgumentException {
        if (parent == null) {
            OrekitException.throwIllegalArgumentException("null parent for frame {0}",
                                                          new Object[] { name });
        }
        this.name = name;
        this.parent = parent;
        this.transform = transform;
        commons = new HashMap();
    }

    /** Get the unique J2000 frame.
     * @return the unique instance of the J2000 frame
     */
    public static Frame getJ2000() {
        return LazyJ2000Holder.instance;
    }

    /** Get the name.
     * @return the name
     */
    public String getName() {
        return this.name;
    }

    /** New definition of the java.util toString() method.
     * @return the name
     */
    public String toString() {
        return this.name;
    }

    /** Get the parent frame
     * @return parent frame
     */
    public Frame getParent() {
        return parent;
    }

    /** Update the transform from the parent frame to the instance.
     * @param transform new transform from parent frame to instance
     */
    public void updateTransform(Transform transform) {
        this.transform = transform;
    }

    /** Get the transform from the instance to another frame.
     * @param destination destination frame to which we want to transform vectors
     * @param date the date (can be null if it is sure than no date dependent frame is used)
     * @return transform from the instance to the destination frame
     * @exception OrekitException if some frame specific error occurs
     */
    public Transform getTransformTo(Frame destination, AbsoluteDate date)
        throws OrekitException {

        // common ancestor to both frames in the frames tree
        final Frame common = findCommon(this, destination);

        // transform from common to instance
        Transform commonToInstance = new Transform();
        for (Frame frame = this; frame != common; frame = frame.parent) {
            // refresh date-dependent frames before composing their transform
            frame.updateFrame(date);
            commonToInstance = new Transform(frame.transform, commonToInstance);
        }

        // transform from destination up to common
        Transform commonToDestination = new Transform();
        for (Frame frame = destination; frame != common; frame = frame.parent) {
            frame.updateFrame(date);
            commonToDestination = new Transform(frame.transform, commonToDestination);
        }

        // transform from instance to destination via common
        return new Transform(commonToInstance.getInverse(), commonToDestination);
    }

    /** Update the frame to the given date.
     * <p>This method is called each time {@link #getTransformTo(Frame, AbsoluteDate)}
     * is called. Default behaviour is to do nothing. The proper way to build
     * a date-dependent frame is to extend {@link Frame} and implement this method which
     * will have to call {@link #updateTransform(Transform)} with the new transform </p>
     * @param date new value of the date
     * @exception OrekitException if some frame specific error occurs
     */
    protected void updateFrame(AbsoluteDate date) throws OrekitException {
    }

    /** Update the transform of this frame so that the given transform holds
     * between two external frames, one of which must be a descendant of (or
     * equal to) this frame.
     * @param f1 first frame
     * @param f2 second frame
     * @param f1Tof2 transform from f1 to f2
     * @param date the date of the transform
     * @exception OrekitException if neither (or both) of f1/f2 is a child of
     * the instance, or if a frame specific error occurs
     */
    public void updateTransform(Frame f1, Frame f2, Transform f1Tof2,
                                AbsoluteDate date) throws OrekitException {

        // make sure f1 is not a child of the instance
        if (f1.isChildOf(this) || (f1 == this)) {

            if (f2.isChildOf(this) || (f2 == this)) {
                throw new FrameAncestorException("both frames {0} and {1} are child of {2}",
                                                 new Object[] { f1.getName(), f2.getName(), getName() });
            }

            // swap f1 and f2 to make sure the child is f2
            final Frame tmp = f1;
            f1 = f2;
            f2 = tmp;
            f1Tof2 = f1Tof2.getInverse();

        } else if (! (f2.isChildOf(this) || (f2 == this))) {
            throw new FrameAncestorException("neither frames {0} nor {1} have {2} as ancestor",
                                             new Object[] { f1.getName(), f2.getName(), getName() });
        }

        // rebuild the transform by traveling from parent to self
        // WITHOUT using the existing this.transform that will be updated
        final Transform parentToF1 = parent.getTransformTo(f1, date);
        final Transform f2ToSelf = f2.getTransformTo(this, date);
        final Transform f1ToSelf = new Transform(f1Tof2, f2ToSelf);
        updateTransform(new Transform(parentToF1, f1ToSelf));
    }

    /** Find the deepest common ancestor of two frames in the frames tree.
     * Results are cached symmetrically in both frames' {@code commons} maps.
     * @param from origin frame
     * @param to destination frame
     * @return an ancestor frame of both <code>from</code> and <code>to</code>
     */
    private static Frame findCommon(Frame from, Frame to) {

        // have we already computed the common frame for this pair ?
        Frame common = (Frame) from.commons.get(to);
        if (common != null) {
            return common;
        }

        // definitions of the path up to the head tree for each frame
        final LinkedList pathFrom = from.pathToRoot();
        final LinkedList pathTo = to.pathToRoot();

        if (pathFrom.isEmpty()||pathTo.contains(from)) {
            // handle root case and same branch case
            common = from;
        }
        if (pathTo.isEmpty()||pathFrom.contains(to)) {
            // handle root case and same branch case
            common = to;
        }
        if (common != null) {
            from.commons.put(to, common);
            to.commons.put(from, common);
            return common;
        }

        // at this stage pathFrom contains at least one frame
        Frame lastFrom = (Frame) pathFrom.removeLast();
        common = lastFrom; // common must be one of the instance of Frame already defined

        // walk both root-anchored paths in lockstep from the root down; the
        // last frame at which they agree is the deepest common ancestor.
        // at the beginning of the loop pathTo contains at least one frame
        for (Frame lastTo = (Frame) pathTo.removeLast();
             (lastTo == lastFrom) && (lastTo != null) && (lastFrom != null);
             lastTo = (Frame) (pathTo.isEmpty() ? null : pathTo.removeLast())) {

            common = lastFrom;

            lastFrom = (Frame) (pathFrom.isEmpty() ? null : pathFrom.removeLast());

        }

        from.commons.put(to, common);
        to.commons.put(from, common);
        return common;
    }

    /** Determine if a Frame is a child of another one.
     * @param potentialAncestor supposed ancestor frame
     * @return true if the potentialAncestor belongs to the
     * path from instance to the root frame
     */
    public boolean isChildOf(Frame potentialAncestor) {
        for (Frame frame = parent; frame != null; frame = frame.parent) {
            if (frame == potentialAncestor) {
                return true;
            }
        }
        return false;
    }

    /** Get the path from instance frame to the root frame.
     * @return path from instance to root, excluding instance itself
     * (empty if instance is root)
     */
    private LinkedList pathToRoot() {
        final LinkedList path = new LinkedList();
        for (Frame frame = parent; frame != null; frame = frame.parent) {
            path.add(frame);
        }
        return path;
    }

    /** Frame Type enum for the
     * {@link Frame#getReferenceFrame(Frame.FrameType, AbsoluteDate)} method.
     */
    public static class FrameType implements Serializable {

        /** Serializable UID. */
        private static final long serialVersionUID = -7876565578577219160L;

        /** Name of the frame type. */
        private final String name;

        /** Build a frame type.
         * @param name name of the frame type
         */
        private FrameType(String name) {
            this.name = name;
        }

        /** Return a string representation of this type.
         * @return string representation of this type (i.e. its name)
         */
        public String toString() {
            return name;
        }
    }

    /** Get one of the 7 unique reference frames.
     * Must be one of {@link #VEIS1950}, {@link #ITRF2000A}, {@link #ITRF2000B},
     * {@link #TIRF2000A}, {@link #TIRF2000B}, {@link #IRF2000A}, {@link #IRF2000B}.
     * @param type the frame type.
     * @param date the current date
     * @return the selected reference frame singleton.
     * @exception OrekitException if the nutation model data embedded in the
     * library cannot be read.
     */
    public static Frame getReferenceFrame(FrameType type, AbsoluteDate date)
        throws OrekitException {
        // Each holder captures either the built singleton or the exception its
        // static initialization raised; a failed build is re-thrown here.
        if (type == ITRF2000A) {
            if (LazyITRF2000AHolder.instance == null) {
                throw LazyITRF2000AHolder.orekitException;
            }
            return LazyITRF2000AHolder.instance;
        }
        if (type == ITRF2000B) {
            if (LazyITRF2000BHolder.instance == null) {
                throw LazyITRF2000BHolder.orekitException;
            }
            return LazyITRF2000BHolder.instance;
        }
        if (type == TIRF2000A) {
            if (LazyTIRF2000AHolder.instance == null) {
                throw LazyTIRF2000AHolder.orekitException;
            }
            return LazyTIRF2000AHolder.instance;
        }
        if (type == TIRF2000B) {
            if (LazyTIRF2000BHolder.instance == null) {
                throw LazyTIRF2000BHolder.orekitException;
            }
            return LazyTIRF2000BHolder.instance;
        }
        if (type == IRF2000A) {
            if (LazyIRF2000AHolder.instance == null) {
                throw LazyIRF2000AHolder.orekitException;
            }
            return LazyIRF2000AHolder.instance;
        }
        if (type == IRF2000B) {
            if (LazyIRF2000BHolder.instance == null) {
                throw LazyIRF2000BHolder.orekitException;
            }
            return LazyIRF2000BHolder.instance;
        }
        if (type == VEIS1950) {
            return LazyVeis1950Holder.instance;
        }
        OrekitException.throwIllegalArgumentException("unknown frame type {0}, known types: " +
                                                      "{1}, {2}, {3}, {4}, {5}, {6} and {7}",
                                                      new Object[] {
                                                          type, ITRF2000A, ITRF2000B, TIRF2000A,
                                                          TIRF2000B, IRF2000A, IRF2000B, VEIS1950
                                                      });
        // in fact, this is never reached
        return null;
    }

    // We use the Initialization on demand holder idiom to store
    // the singletons, as it is both thread-safe, efficient (no
    // synchronization) and works with all version of java.

    /** Holder for the J2000 frame singleton. */
    private static class LazyJ2000Holder {
        private static final Frame instance = new Frame("J2000");
    }

    /** Holder for the ITRF 2000 A frame singleton. */
    private static class LazyITRF2000AHolder {
        private static final Frame instance;
        private static final OrekitException orekitException;
        static {
            Frame tmpFrame = null;
            OrekitException tmpException = null;
            try {
                // chained onto the TIRF 2000 A singleton
                tmpFrame = new ITRF2000Frame(LazyTIRF2000AHolder.instance, AbsoluteDate.J2000Epoch, ITRF2000A.name);
            } catch (OrekitException oe) {
                tmpException = oe;
            }
            instance = tmpFrame;
            orekitException = tmpException;
        }
    }

    /** Holder for the ITRF 2000 B frame singleton. */
    private static class LazyITRF2000BHolder {
        private static final Frame instance;
        private static final OrekitException orekitException;
        static {
            Frame tmpFrame = null;
            OrekitException tmpException = null;
            try {
                tmpFrame = new ITRF2000Frame(LazyTIRF2000BHolder.instance, AbsoluteDate.J2000Epoch, ITRF2000B.name);
            } catch (OrekitException oe) {
                tmpException = oe;
            }
            instance = tmpFrame;
            orekitException = tmpException;
        }
    }

    /** Holder for the TIRF 2000 A frame singleton. */
    private static class LazyTIRF2000AHolder {
        private static final Frame instance;
        private static final OrekitException orekitException;
        static {
            Frame tmpFrame = null;
            OrekitException tmpException = null;
            try {
                tmpFrame = new TIRF2000Frame(LazyIRF2000AHolder.instance, AbsoluteDate.J2000Epoch, TIRF2000A.name);
            } catch (OrekitException oe) {
                tmpException = oe;
            }
            instance = tmpFrame;
            orekitException = tmpException;
        }
    }

    /** Holder for the TIRF Frame 2000 B frame singleton. */
    private static class LazyTIRF2000BHolder {
        private static final Frame instance;
        private static final OrekitException orekitException;
        static {
            Frame tmpFrame = null;
            OrekitException tmpException = null;
            try {
                tmpFrame = new TIRF2000Frame(LazyIRF2000BHolder.instance, AbsoluteDate.J2000Epoch, TIRF2000B.name);
            } catch (OrekitException oe) {
                tmpException = oe;
            }
            instance = tmpFrame;
            orekitException = tmpException;
        }
    }

    /** Holder for the IRF 2000 A frame singleton. */
    private static class LazyIRF2000AHolder {
        private static final Frame instance;
        private static final OrekitException orekitException;
        static {
            Frame tmpFrame = null;
            OrekitException tmpException = null;
            try {
                tmpFrame = new IRF2000Frame(AbsoluteDate.J2000Epoch, false, IRF2000A.name);
            } catch (OrekitException oe) {
                tmpException = oe;
            }
            instance = tmpFrame;
            orekitException = tmpException;
        }
    }

    /** Holder for the IRF 2000 B frame singleton. */
    private static class LazyIRF2000BHolder {
        private static final Frame instance;
        private static final OrekitException orekitException;
        static {
            Frame tmpFrame = null;
            OrekitException tmpException = null;
            try {
                tmpFrame = new IRF2000Frame(AbsoluteDate.J2000Epoch, true, IRF2000B.name);
            } catch (OrekitException oe) {
                tmpException = oe;
            }
            instance = tmpFrame;
            orekitException = tmpException;
        }
    }

    /** Holder for the Veis 1950 frame singleton. */
    private static class LazyVeis1950Holder {
        private static final Frame instance;
        static {
            // fixed rotation from J2000, expressed as a normalized quaternion
            final double q1 = -2.01425201682020570e-5;
            final double q2 = -2.43283773387856897e-3;
            final double q3 = 5.59078052583013584e-3;
            final double q0 = Math.sqrt(1.0 - q1 * q1 - q2 * q2 - q3 * q3);
            instance = new Frame(getJ2000(),
                                 new Transform(new Rotation(q0, q1, q2, q3, true)),
                                 VEIS1950.name);
        }
    }

}
package hello;

import java.io.IOException;
import java.util.Collection;
import java.util.List;

import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;

import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.GenericFilterBean;

import hello.data.OAuthURIAccess;
import hello.data.Role;
import hello.data.URIAccessRepository;

/**
 * Servlet filter that gates every request (except the OAuth token endpoint)
 * against the URI/role access rules stored in {@link URIAccessRepository}.
 * A request matching a secured rule for which the user holds the required
 * role is passed down the chain; everything else receives a 401 JSON error.
 */
@Component
@Order(Ordered.LOWEST_PRECEDENCE)
public class RequestAccessFilter extends GenericFilterBean {

    @Autowired
    URIAccessRepository repository;

    @Override
    public void doFilter(ServletRequest arg0, ServletResponse arg1, FilterChain chain)
            throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) arg0;

        // The token endpoint must stay reachable so clients can authenticate.
        if (request.getRequestURI().contains("/oauth/token")) {
            chain.doFilter(arg0, arg1);
            return; // BUG FIX: original fell through into the access checks
        }

        // Strip the context path so the lookup key matches the stored URIs.
        String uri = request.getRequestURI().substring(request.getContextPath().length());
        List<OAuthURIAccess> uriConfiguration = repository.getURIConfiguration(uri);

        for (OAuthURIAccess access : uriConfiguration) {
            boolean userInRole = request.isUserInRole(access.getRole());
            // NOTE(review): StringUtils.contains(access.getUri(), requestURI)
            // asks whether the configured URI contains the request URI; the
            // intended direction may be the opposite -- confirm against data.
            if (BooleanUtils.toBoolean(access.getSecured())
                    && StringUtils.contains(access.getUri(), request.getRequestURI())
                    && userInRole) {
                chain.doFilter(arg0, arg1);
                // BUG FIX: the original kept looping and unconditionally wrote
                // a 401 body even after forwarding an authorized request.
                return;
            }
        }

        // No rule authorized this request (including the previously-unhandled
        // case of an empty rule list): reject with a JSON 401.
        HttpServletResponse response = (HttpServletResponse) arg1;
        response.setContentType(MediaType.APPLICATION_JSON);
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        response.getWriter().write("{\"error\":\"Unauthorized access\"}");
    }
}
package org.jgroups.tests;

import org.jgroups.*;
import org.jgroups.util.Util;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Tests a SEQUENCER based stack: demonstrates race condition where thread#1
 * gets seqno, thread#2 gets seqno, thread#2 sends, thread#1 tries to send but
 * is out of order.
 *
 * In order to test total ordering, make sure that messages are sent from
 * concurrent senders; using one sender will cause NAKACK to FIFO order
 * the messages and the assertions in this test will still hold true, whether
 * SEQUENCER is present or not.
 */
@Test(groups=Global.STACK_INDEPENDENT,sequential=true)
public class SequencerOrderTest {
    private JChannel ch1, ch2;
    private MyReceiver r1, r2;
    static final String GROUP="demo-group";
    static final int NUM_MSGS=1000;
    static final String props="sequencer.xml";

    @BeforeMethod
    void setUp() throws Exception {
        ch1=new JChannel(props);
        ch1.connect(GROUP);
        ch2=new JChannel(props);
        ch2.connect(GROUP);
    }

    @AfterMethod
    void tearDown() throws Exception {
        // Close in reverse order of creation.
        if(ch2 != null) {
            ch2.close();
            ch2 = null;
        }
        if(ch1 != null) {
            ch1.close();
            ch1 = null;
        }
    }

    @Test
    public void testBroadcastSequence() throws Exception {
        r1=new MyReceiver(ch1.getLocalAddress());
        ch1.setReceiver(r1);
        r2=new MyReceiver(ch2.getLocalAddress());
        ch2.setReceiver(r2);

        // use concurrent senders to send messages to the group
        Thread thread1 = new Thread() {
            public void run() {
                Util.sleep(300);
                for(int i=1; i <= NUM_MSGS; i++) {
                    try {
                        // FIX: Integer.valueOf instead of the deprecated
                        // "new Integer(...)" constructor.
                        ch1.send(new Message(null, null, Integer.valueOf(i)));
                    }
                    catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    System.out.print("-- messages sent thread 1: " + i + "/" + NUM_MSGS + "\r");
                }
            }
        };
        Thread thread2 = new Thread() {
            public void run() {
                Util.sleep(300);
                for(int i=1; i <= NUM_MSGS; i++) {
                    try {
                        ch2.send(new Message(null, null, Integer.valueOf(i)));
                    }
                    catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    System.out.print("-- messages sent thread 2: " + i + "/" + NUM_MSGS + "\r");
                }
            }
        };
        thread1.start();
        thread2.start();
        thread1.join();
        thread2.join();
        System.out.println("");
        System.out.println("-- verifying messages on ch1 and ch2");
        verifyNumberOfMessages(NUM_MSGS * 2);
        verifyMessageOrder(r1.getMsgs());
        verifyMessageOrder(r2.getMsgs());
        verifySameOrder();
    }

    /** Waits up to 10s for both receivers to accumulate num_msgs messages. */
    private void verifyNumberOfMessages(int num_msgs) throws Exception {
        List<Integer> l1=r1.getMsgs();
        List<Integer> l2=r2.getMsgs();
        long end_time=System.currentTimeMillis() + 10000;
        while(System.currentTimeMillis() < end_time) {
            if(l1.size() >= num_msgs && l2.size() >= num_msgs)
                break;
            Util.sleep(500);
        }
        System.out.println("l1.size()=" + l1.size() + ", l2.size()=" + l2.size());
        Assert.assertEquals(l1.size(), num_msgs, "list 1 should have " + num_msgs + " elements");
        Assert.assertEquals(l2.size(), num_msgs, "list 2 should have " + num_msgs + " elements");
    }

    /**
     * Checks that the list is a legal interleaving of the two senders'
     * sequences: every element must advance one of the two per-sender
     * counters i and j.
     */
    private void verifyMessageOrder(List<Integer> list) throws Exception {
        List<Integer> l1=r1.getMsgs();
        List<Integer> l2=r2.getMsgs();
        System.out.println("l1: " + l1);
        System.out.println("l2: " + l2);
        int i=1,j=1;
        for(int count: list) {
            if(count == i)
                i++;
            else if(count == j)
                j++;
            else
                throw new Exception("got " + count + ", but expected " + i + " or " + j);
        }
    }

    /** Checks that both members delivered the messages in the same total order. */
    private void verifySameOrder() throws Exception {
        List<Integer> l1=r1.getMsgs();
        List<Integer> l2=r2.getMsgs();
        int[] arr1=new int[l1.size()];
        int[] arr2=new int[l2.size()];
        int index=0;
        for(int el: l1) {
            arr1[index++]=el;
        }
        index=0;
        for(int el: l2) {
            arr2[index++]=el;
        }
        int count1, count2;
        for(int i=0; i < arr1.length; i++) {
            count1=arr1[i];
            count2=arr2[i];
            if(count1 != count2)
                throw new Exception("lists are different at index " + i + ": count1=" + count1 + ", count2=" + count2);
        }
    }

    private static class MyReceiver extends ReceiverAdapter {
        Address local_addr;
        // FIX: receive() runs on JGroups threads while the test thread polls
        // size() and reads the contents; a synchronized list makes that safe
        // (the plain LinkedList was a data race).
        List<Integer> msgs=Collections.synchronizedList(new LinkedList<Integer>());

        private MyReceiver(Address local_addr) {
            this.local_addr=local_addr;
        }

        public List<Integer> getMsgs() {
            return msgs;
        }

        public void receive(Message msg) {
            msgs.add((Integer)msg.getObject());
        }
    }
}
package org.vitrivr.cineast.core.data;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

/**
 * Unit tests for {@code Location}: verifies that coordinates survive both
 * construction paths unchanged, that out-of-range latitudes are clamped and
 * longitudes wrapped, and that invalid inputs are rejected.
 */
public class LocationTest {

  /** Tolerance for float coordinate comparisons. */
  private static final float COORDINATES_DELTA = 1e-5f;

  @Test
  @DisplayName("Null Island")
  public void testNullIsland() {
    expectUnchanged(0f, 0f);
  }

  @Test
  @DisplayName("Positive Values")
  public void testPositiveValues() {
    expectUnchanged(47.23f, 7.34f);
  }

  @Test
  @DisplayName("Negative Values")
  public void testNegativeValues() {
    expectUnchanged(-13.163077f, -72.5473746f);
  }

  @Test
  @DisplayName("Latitude Clamping")
  public void testLatitudeClamping() {
    expectNormalized(90f, 0f, 123f, 0f);
    expectNormalized(-90f, 0f, -123f, 0f);
  }

  @Test
  @DisplayName("Longitude Wrapping")
  public void testLongitudeWrapping() {
    expectNormalized(0f, 0f, 0f, 360f);
    expectNormalized(0f, -135f, 0f, 225f);
    expectNormalized(0f, -180f, 0f, 180f);
  }

  @Test
  @DisplayName("Invalid Float Array")
  public void testInvalidFloatArray() {
    // Arrays with anything other than exactly two elements must be rejected.
    ImmutableList.of(new float[] {}, new float[]{ 0f }, new float[]{ 0f, 1f, 2f })
        .forEach(candidate ->
            assertThrows(IllegalArgumentException.class, () -> Location.of(candidate)));
  }

  @Test
  @DisplayName("NaN Values")
  public void testNanValues() {
    assertThrows(IllegalArgumentException.class, () -> Location.of(Float.NaN, 0f));
    assertThrows(IllegalArgumentException.class, () -> Location.of(0f, Float.NaN));
  }

  /** Asserts that both construction paths keep the given coordinates as-is. */
  private static void expectUnchanged(float latitude, float longitude) {
    for (Location candidate : buildBothWays(latitude, longitude)) {
      checkLocation(latitude, longitude, candidate);
    }
  }

  /** Asserts that the given raw coordinates normalize to the expected ones. */
  private static void expectNormalized(float expectedLat, float expectedLng,
      float rawLat, float rawLng) {
    for (Location candidate : buildBothWays(rawLat, rawLng)) {
      checkLocation(expectedLat, expectedLng, candidate);
    }
  }

  /** Builds the same Location via the scalar and the float-array factory. */
  private static List<Location> buildBothWays(float latitude, float longitude) {
    return ImmutableList.of(
        Location.of(latitude, longitude),
        Location.of(new float[] { latitude, longitude }));
  }

  /** Checks latitude/longitude through the named getters and getElement. */
  private static void checkLocation(float expectedLatitude, float expectedLongitude,
      Location actual) {
    assertEquals(expectedLatitude, actual.getLatitude(), COORDINATES_DELTA,
        "Latitude of Location did not match expected");
    assertEquals(expectedLongitude, actual.getLongitude(), COORDINATES_DELTA,
        "Longitude of Location did not match expected");
    assertEquals(expectedLatitude, actual.getElement(0), COORDINATES_DELTA,
        "First element of Location did not match expected");
    assertEquals(expectedLongitude, actual.getElement(1), COORDINATES_DELTA,
        "Second element of Location did not match expected");
  }
}
package hudson.plugins.git; import com.cloudbees.plugins.credentials.CredentialsMatchers; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder; import com.google.common.collect.Iterables; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.*; import hudson.init.Initializer; import hudson.matrix.MatrixBuild; import hudson.matrix.MatrixRun; import hudson.model.*; import hudson.model.Descriptor.FormException; import hudson.model.Hudson.MasterComputer; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.extensions.GitClientConflictException; import hudson.plugins.git.extensions.GitClientType; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.GitSCMExtensionDescriptor; import hudson.plugins.git.extensions.impl.AuthorInChangelog; import hudson.plugins.git.extensions.impl.BuildChooserSetting; import hudson.plugins.git.extensions.impl.PreBuildMerge; import hudson.plugins.git.opt.PreBuildMergeOptions; import hudson.plugins.git.util.Build; import hudson.plugins.git.util.*; import hudson.remoting.Channel; import hudson.scm.*; import hudson.security.ACL; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.triggers.SCMTrigger; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.IOException2; import hudson.util.ListBoxModel; import jenkins.model.Jenkins; import net.sf.json.JSONObject; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.transport.RemoteConfig; import org.eclipse.jgit.transport.URIish; import org.jenkinsci.plugins.gitclient.ChangelogCommand; import org.jenkinsci.plugins.gitclient.CheckoutCommand; import 
org.jenkinsci.plugins.gitclient.CloneCommand; import org.jenkinsci.plugins.gitclient.FetchCommand; import org.jenkinsci.plugins.gitclient.Git; import org.jenkinsci.plugins.gitclient.GitClient; import org.jenkinsci.plugins.gitclient.JGitTool; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.export.Exported; import javax.servlet.ServletException; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.Serializable; import java.io.Writer; import java.text.MessageFormat; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import static hudson.Util.*; import static hudson.init.InitMilestone.JOB_LOADED; import static hudson.init.InitMilestone.PLUGINS_STARTED; import hudson.plugins.git.browser.GithubWeb; import static hudson.scm.PollingResult.*; import hudson.util.IOUtils; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.apache.commons.lang.StringUtils.isBlank; /** * Git SCM. * * @author Nigel Magnay * @author Andrew Bayer * @author Nicolas Deloof * @author Kohsuke Kawaguchi * ... and many others */ public class GitSCM extends GitSCMBackwardCompatibility { /** * Store a config version so we're able to migrate config on various * functionality upgrades. */ private Long configVersion; /** * All the remote repositories that we know about. */ private List<UserRemoteConfig> userRemoteConfigs; private transient List<RemoteConfig> remoteRepositories; /** * All the branches that we wish to care about building. 
*/
private List<BranchSpec> branches;

// Set from the @DataBoundConstructor parameter; defaults to false.
private boolean doGenerateSubmoduleConfigurations;

// Name of the configured git tool installation; null selects the default
// installation (see resolveGitTool).
public String gitTool = null;

// Repository browser used to render changelog links; may be null.
private GitRepositoryBrowser browser;

private Collection<SubmoduleConfig> submoduleCfg;

// Names of the environment variables this SCM exports to builds.
public static final String GIT_BRANCH = "GIT_BRANCH";
public static final String GIT_COMMIT = "GIT_COMMIT";
public static final String GIT_PREVIOUS_COMMIT = "GIT_PREVIOUS_COMMIT";

/** All the configured extensions attached to this. */
private DescribableList<GitSCMExtension,GitSCMExtensionDescriptor> extensions;

/** @return the configured submodule configurations; may be null. */
public Collection<SubmoduleConfig> getSubmoduleCfg() {
    return submoduleCfg;
}

public void setSubmoduleCfg(Collection<SubmoduleConfig> submoduleCfg) {
    this.submoduleCfg = submoduleCfg;
}

/** Wraps a single URL into the one-element remote list used by the convenience constructor. */
static private List<UserRemoteConfig> createRepoList(String url) {
    List<UserRemoteConfig> repoList = new ArrayList<UserRemoteConfig>();
    repoList.add(new UserRemoteConfig(url, null, null, null));
    return repoList;
}

/**
 * A convenience constructor that sets everything to default.
 *
 * @param repositoryUrl
 *      Repository URL to clone from.
*/
public GitSCM(String repositoryUrl) {
    this(
            createRepoList(repositoryUrl),
            Collections.singletonList(new BranchSpec("")),
            false, Collections.<SubmoduleConfig>emptyList(),
            null, null, null);
}

// @Restricted(NoExternalUse.class) // because this keeps changing
@DataBoundConstructor
public GitSCM(
        List<UserRemoteConfig> userRemoteConfigs,
        List<BranchSpec> branches,
        Boolean doGenerateSubmoduleConfigurations,
        Collection<SubmoduleConfig> submoduleCfg,
        GitRepositoryBrowser browser,
        String gitTool,
        List<GitSCMExtension> extensions) {

    // moved from createBranches
    // A null/empty branch list defaults to building */master.
    if (branches == null) {
        branches = new ArrayList<BranchSpec>();
    }
    if (branches.isEmpty()) {
        branches.add(new BranchSpec("*/master"));
    }
    this.branches = branches;

    this.userRemoteConfigs = userRemoteConfigs;
    // Derive the JGit RemoteConfig list from the user-supplied settings.
    updateFromUserData();

    // TODO: getBrowserFromRequest
    this.browser = browser;

    // emulate bindJSON behavior here
    if (doGenerateSubmoduleConfigurations != null) {
        this.doGenerateSubmoduleConfigurations = doGenerateSubmoduleConfigurations;
    } else {
        this.doGenerateSubmoduleConfigurations = false;
    }

    if (submoduleCfg == null) {
        submoduleCfg = new ArrayList<SubmoduleConfig>();
    }
    this.submoduleCfg = submoduleCfg;

    // Freshly-constructed instances are always at the current config version.
    this.configVersion = 2L;
    this.gitTool = gitTool;

    this.extensions = new DescribableList<GitSCMExtension, GitSCMExtensionDescriptor>(Saveable.NOOP,Util.fixNull(extensions));

    getBuildChooser(); // set the gitSCM field.
}

/**
 * All the configured extensions attached to this {@link GitSCM}.
 *
 * Going forward this is primarily how we'll support esoteric use cases.
* * @since 1.EXTENSION */ public DescribableList<GitSCMExtension, GitSCMExtensionDescriptor> getExtensions() { return extensions; } private void updateFromUserData() throws GitException { // do what newInstance used to do directly from the request data try { String[] pUrls = new String[userRemoteConfigs.size()]; String[] repoNames = new String[userRemoteConfigs.size()]; String[] refSpecs = new String[userRemoteConfigs.size()]; for (int i = 0; i < userRemoteConfigs.size(); ++i) { pUrls[i] = userRemoteConfigs.get(i).getUrl(); repoNames[i] = userRemoteConfigs.get(i).getName(); refSpecs[i] = userRemoteConfigs.get(i).getRefspec(); } this.remoteRepositories = DescriptorImpl.createRepositoryConfigurations(pUrls, repoNames, refSpecs); // TODO: replace with new repositories } catch (IOException e1) { throw new GitException("Error creating repositories", e1); } } public Object readResolve() throws IOException { // Migrate data // Default unspecified to v0 if (configVersion == null) { configVersion = 0L; } if (source != null) { remoteRepositories = new ArrayList<RemoteConfig>(); branches = new ArrayList<BranchSpec>(); doGenerateSubmoduleConfigurations = false; List<RefSpec> rs = new ArrayList<RefSpec>(); rs.add(new RefSpec("+refs/heads/*:refs/remotes/origin/*")); remoteRepositories.add(newRemoteConfig("origin", source, rs.toArray(new RefSpec[0]))); if (branch != null) { branches.add(new BranchSpec(branch)); } else { branches.add(new BranchSpec("*/master")); } } if (configVersion < 1 && branches != null) { // Migrate the branch specs from // single * wildcard, to ** wildcard. 
for (BranchSpec branchSpec : branches) { String name = branchSpec.getName(); name = name.replace("*", "**"); branchSpec.setName(name); } } if (remoteRepositories != null && userRemoteConfigs == null) { userRemoteConfigs = new ArrayList<UserRemoteConfig>(); for(RemoteConfig cfg : remoteRepositories) { // converted as in config.jelly String url = ""; if (cfg.getURIs().size() > 0 && cfg.getURIs().get(0) != null) url = cfg.getURIs().get(0).toPrivateString(); String refspec = ""; if (cfg.getFetchRefSpecs().size() > 0 && cfg.getFetchRefSpecs().get(0) != null) refspec = cfg.getFetchRefSpecs().get(0).toString(); userRemoteConfigs.add(new UserRemoteConfig(url, cfg.getName(), refspec, null)); } } // patch internal objects from user data // if (configVersion == 2) { if (remoteRepositories == null) { // if we don't catch GitException here, the whole job fails to load try { updateFromUserData(); } catch (GitException e) { LOGGER.log(Level.WARNING, "Failed to load SCM data", e); } } if (extensions==null) extensions = new DescribableList<GitSCMExtension, GitSCMExtensionDescriptor>(Saveable.NOOP); readBackExtensionsFromLegacy(); if (choosingStrategy != null && getBuildChooser().getClass()==DefaultBuildChooser.class) { for (BuildChooserDescriptor d : BuildChooser.all()) { if (choosingStrategy.equals(d.getLegacyId())) { try { setBuildChooser(d.clazz.newInstance()); } catch (InstantiationException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser", e); } catch (IllegalAccessException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser", e); } } } } getBuildChooser(); // set the gitSCM field. 
return this; } @Override public GitRepositoryBrowser getBrowser() { return browser; } @Override public RepositoryBrowser<?> guessBrowser() { if (remoteRepositories != null && remoteRepositories.size() == 1) { List<URIish> uris = remoteRepositories.get(0).getURIs(); if (uris.size() == 1) { String uri = uris.get(0).toString(); // TODO make extensible by introducing an abstract GitRepositoryBrowserDescriptor Matcher m = Pattern.compile("(https://github[.]com/[^/]+/[^/]+)[.]git").matcher(uri); if (m.matches()) { return new GithubWeb(m.group(1) + "/"); } m = Pattern.compile("git@github[.]com:([^/]+/[^/]+)[.]git").matcher(uri); if (m.matches()) { return new GithubWeb("https://github.com/" + m.group(1) + "/"); } } } return null; } public boolean isCreateAccountBasedOnEmail() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isCreateAccountBasedOnEmail()); } public BuildChooser getBuildChooser() { BuildChooser bc; BuildChooserSetting bcs = getExtensions().get(BuildChooserSetting.class); if (bcs!=null) bc = bcs.getBuildChooser(); else bc = new DefaultBuildChooser(); bc.gitSCM = this; return bc; } public void setBuildChooser(BuildChooser buildChooser) throws IOException { if (buildChooser.getClass()==DefaultBuildChooser.class) { getExtensions().remove(BuildChooserSetting.class); } else { getExtensions().replace(new BuildChooserSetting(buildChooser)); } } /** * Gets the parameter-expanded effective value in the context of the current build. */ public String getParamLocalBranch(Run<?, ?> build) throws IOException, InterruptedException { String branch = getLocalBranch(); // substitute build parameters if available return getParameterString(branch != null ? branch : null, build.getEnvironment()); } /** * Expand parameters in {@link #remoteRepositories} with the parameter values provided in the given build * and return them. * * @return can be empty but never null. 
*/ public List<RemoteConfig> getParamExpandedRepos(Run<?, ?> build) throws IOException, InterruptedException { List<RemoteConfig> expandedRepos = new ArrayList<RemoteConfig>(); EnvVars env = build.getEnvironment(); for (RemoteConfig oldRepo : Util.fixNull(remoteRepositories)) { expandedRepos.add( newRemoteConfig( getParameterString(oldRepo.getName(), env), getParameterString(oldRepo.getURIs().get(0).toPrivateString(), env), getRefSpecs(oldRepo, env).toArray(new RefSpec[0]))); } return expandedRepos; } public RemoteConfig getRepositoryByName(String repoName) { for (RemoteConfig r : getRepositories()) { if (r.getName().equals(repoName)) { return r; } } return null; } @Exported public List<UserRemoteConfig> getUserRemoteConfigs() { return Collections.unmodifiableList(userRemoteConfigs); } public List<RemoteConfig> getRepositories() { // Handle null-value to ensure backwards-compatibility, ie project configuration missing the <repositories/> XML element if (remoteRepositories == null) { return new ArrayList<RemoteConfig>(); } return remoteRepositories; } public String getGitTool() { return gitTool; } public static String getParameterString(String original, EnvVars env) { return env.expand(original); } private List<RefSpec> getRefSpecs(RemoteConfig repo, EnvVars env) { List<RefSpec> refSpecs = new ArrayList<RefSpec>(); for (RefSpec refSpec : repo.getFetchRefSpecs()) { refSpecs.add(new RefSpec(getParameterString(refSpec.toString(), env))); } return refSpecs; } /** * If the configuration is such that we are tracking just one branch of one repository * return that branch specifier (in the form of something like "origin/master" or a SHA1-hash * * Otherwise return null. 
*/
private String getSingleBranch(EnvVars env) {
    // if we have multiple branches skip to advanced usecase
    if (getBranches().size() != 1 || getRepositories().size() != 1) {
        return null;
    }

    String branch = getBranches().get(0).getName();
    String repository = getRepositories().get(0).getName();

    // replace repository wildcard with repository name
    if (branch.startsWith("*/")) {
        branch = repository + branch.substring(1);
    }

    // if the branch name contains more wildcards then the simple usecase
    // does not apply and we need to skip to the advanced usecase
    if (branch.contains("*")) {
        return null;
    }

    // substitute build parameters if available
    branch = getParameterString(branch, env);

    // Check for empty string - replace with "**" when seen.
    if (branch.equals("")) {
        branch = "**";
    }

    return branch;
}

@Override
public SCMRevisionState calcRevisionsFromBuild(Run<?, ?> abstractBuild, FilePath workspace, Launcher launcher, TaskListener taskListener) throws IOException, InterruptedException {
    // No baseline is recorded; polling works off BuildData instead
    // (see compareRemoteRevisionWithImpl below).
    return SCMRevisionState.NONE;
}

@Override
public boolean requiresWorkspaceForPolling() {
    // A workspace is needed if any extension demands one, or if the
    // configuration does not boil down to a single fixed branch.
    for (GitSCMExtension ext : getExtensions()) {
        if (ext.requiresWorkspaceForPolling()) return true;
    }
    return getSingleBranch(new EnvVars()) == null;
}

@Override
public PollingResult compareRemoteRevisionWith(Job<?, ?> project, Launcher launcher, FilePath workspace, final TaskListener listener, SCMRevisionState baseline) throws IOException, InterruptedException {
    try {
        return compareRemoteRevisionWithImpl( project, launcher, workspace, listener);
    } catch (GitException e){
        // Wrap so callers only see IOException, preserving the cause.
        throw new IOException2(e);
    }
}

/** Maps a (possibly remote) workspace back to the Node hosting it; falls back to the master. */
private static Node workspaceToNode(FilePath workspace) {
    Jenkins j = Jenkins.getInstance();
    if (workspace.isRemote()) {
        for (Computer c : j.getComputers()) {
            if (c.getChannel() == workspace.getChannel()) {
                Node n = c.getNode();
                if (n != null) {
                    return n;
                }
            }
        }
    }
    return j;
}

private PollingResult compareRemoteRevisionWithImpl(Job<?, ?> project, Launcher launcher, FilePath workspace, final TaskListener listener) throws IOException, InterruptedException {
    // Poll for changes. Are there any unbuilt revisions that Hudson ought to build ?

    listener.getLogger().println("Using strategy: " + getBuildChooser().getDisplayName());

    final Run lastBuild = project.getLastBuild();
    if (lastBuild == null) {
        // If we've never been built before, well, gotta build!
        listener.getLogger().println("[poll] No previous build, so forcing an initial build.");
        return BUILD_NOW;
    }

    final BuildData buildData = fixNull(getBuildData(lastBuild));
    if (buildData.lastBuild != null) {
        listener.getLogger().println("[poll] Last Built Revision: " + buildData.lastBuild.revision);
    }

    final String singleBranch = getSingleBranch(lastBuild.getEnvironment());

    // fast remote polling needs a single branch and an existing last build
    if (!requiresWorkspaceForPolling() && buildData.lastBuild != null && buildData.lastBuild.getMarked() != null) {

        // FIXME this should not be a specific case, but have BuildChooser tell us if it can poll without workspace.

        final EnvVars environment = project instanceof AbstractProject ? GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener, false) : new EnvVars();

        GitClient git = createClient(listener, environment, project, Jenkins.getInstance(), null);

        // Compare the remote head of the single branch against the last
        // marked (pre-merge) revision we built.
        String gitRepo = getParamExpandedRepos(lastBuild).get(0).getURIs().get(0).toString();
        ObjectId head = git.getHeadRev(gitRepo, getBranches().get(0).getName());

        if (head != null && buildData.lastBuild.getMarked().getSha1().equals(head)) {
            return NO_CHANGES;
        } else {
            return BUILD_NOW;
        }
    }

    final EnvVars environment = project instanceof AbstractProject ? GitUtils.getPollEnvironment((AbstractProject) project, workspace, launcher, listener) : new EnvVars();

    FilePath workingDirectory = workingDirectory(project,workspace,environment,listener);

    // (Re)build if the working directory doesn't exist
    if (workingDirectory == null || !workingDirectory.exists()) {
        return BUILD_NOW;
    }

    GitClient git = createClient(listener, environment, project, workspaceToNode(workspace), workingDirectory);

    if (git.hasGitRepo()) {
        // Repo is there - do a fetch
        listener.getLogger().println("Fetching changes from the remote Git repositories");

        // Fetch updates
        for (RemoteConfig remoteRepository : getParamExpandedRepos(lastBuild)) {
            fetchFrom(git, listener, remoteRepository);
        }

        listener.getLogger().println("Polling for changes in");

        // Ask the build chooser for buildable revisions; any non-excluded
        // candidate means polling found something significant.
        Collection<Revision> candidates = getBuildChooser().getCandidateRevisions(
                true, singleBranch, git, listener, buildData, new BuildChooserContextImpl(project, null, environment));

        for (Revision c : candidates) {
            if (!isRevExcluded(git, c, listener, buildData)) {
                return PollingResult.SIGNIFICANT;
            }
        }

        return NO_CHANGES;
    } else {
        listener.getLogger().println("No Git repository yet, an initial checkout is required");
        return PollingResult.SIGNIFICANT;
    }
}

/**
 * Allows {@link Builder}s and {@link Publisher}s to access a configured {@link GitClient} object to
 * perform additional git operations.
*/
public GitClient createClient(TaskListener listener, EnvVars environment, Run<?,?> build, FilePath workspace) throws IOException, InterruptedException {
    FilePath ws = workingDirectory(build.getParent(), workspace, environment, listener);
    ws.mkdirs(); // ensure it exists
    return createClient(listener,environment, build.getParent(), workspaceToNode(workspace), ws);
}

/**
 * Builds a {@link GitClient} for the given node/workspace, decorated by all
 * configured extensions and pre-loaded with the credentials configured for
 * each remote.
 */
/*package*/ GitClient createClient(TaskListener listener, EnvVars environment, Job project, Node n, FilePath ws) throws IOException, InterruptedException {
    String gitExe = getGitExe(n, listener);
    Git git = Git.with(listener, environment).in(ws).using(gitExe);

    GitClient c = git.getClient();
    // Let every configured extension wrap/decorate the client.
    for (GitSCMExtension ext : extensions) {
        c = ext.decorate(this,c);
    }

    // Attach the configured credentials for each remote that specifies some.
    for (UserRemoteConfig uc : getUserRemoteConfigs()) {
        if (uc.getCredentialsId() != null) {
            String url = uc.getUrl();
            StandardUsernameCredentials credentials = CredentialsMatchers
                    .firstOrNull(
                            CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class,
                                    project, ACL.SYSTEM,
                                    URIRequirementBuilder.fromUri(url).build()),
                            CredentialsMatchers.allOf(CredentialsMatchers.withId(uc.getCredentialsId()),
                                    GitClient.CREDENTIALS_MATCHER));
            if (credentials != null) {
                c.addCredentials(url, credentials);
            }
        }
    }
    // TODO add default credentials

    return c;
}

/** Never-null view of BuildData; substitutes an empty dummy when absent. */
private BuildData fixNull(BuildData bd) {
    return bd != null ? bd : new BuildData(getScmName(), getUserRemoteConfigs()) /*dummy*/;
}

/**
 * Fetch information from a particular remote repository.
 *
 * @param git
 * @param listener
 * @param remoteRepository
 * @throws InterruptedException
 * @throws IOException
 */
private void fetchFrom(GitClient git, TaskListener listener, RemoteConfig remoteRepository) throws InterruptedException, IOException {
    boolean first = true;
    for (URIish url : remoteRepository.getURIs()) {
        try {
            // First URI becomes the remote's URL; the rest are added as
            // additional URLs for the same remote.
            if (first) {
                git.setRemoteUrl(remoteRepository.getName(), url.toPrivateASCIIString());
                first = false;
            } else {
                git.addRemoteUrl(remoteRepository.getName(), url.toPrivateASCIIString());
            }
            FetchCommand fetch = git.fetch_().from(url, remoteRepository.getFetchRefSpecs());
            // Extensions may tune the fetch (timeouts, prune, etc.).
            for (GitSCMExtension extension : extensions) {
                extension.decorateFetchCommand(this, git, listener, fetch);
            }
            fetch.execute();
        } catch (GitException ex) {
            throw new GitException("Failed to fetch from "+url.toString(), ex);
        }
    }
}

/** Builds a JGit RemoteConfig from a name, URL and optional refspecs. */
private RemoteConfig newRemoteConfig(String name, String refUrl, RefSpec... refSpec) {
    try {
        Config repoConfig = new Config();
        // Make up a repo config from the request parameters
        repoConfig.setString("remote", name, "url", refUrl);
        List<String> str = new ArrayList<String>();
        if(refSpec != null && refSpec.length > 0)
            for (RefSpec rs: refSpec)
                str.add(rs.toString());
        repoConfig.setStringList("remote", name, "fetch", str);

        return RemoteConfig.getAllRemoteConfigs(repoConfig).get(0);
    } catch (Exception ex) {
        throw new GitException("Error trying to create JGit configuration", ex);
    }
}

/** Resolves the configured git tool installation, falling back to the default. */
public GitTool resolveGitTool(TaskListener listener) {
    if (gitTool == null) return GitTool.getDefaultInstallation();
    GitTool git =  Jenkins.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallation(gitTool);
    if (git == null) {
        listener.getLogger().println("selected Git installation does not exists. Using Default");
        git = GitTool.getDefaultInstallation();
    }
    return git;
}

public String getGitExe(Node builtOn, TaskListener listener) {
    return getGitExe(builtOn, null, listener);
}

/**
 * Exposing so that we can get this from GitPublisher.
*/
public String getGitExe(Node builtOn, EnvVars env, TaskListener listener) {

    // Combine the client-type requirements of all extensions; conflicting
    // requirements are a configuration error.
    GitClientType client = GitClientType.ANY;
    for (GitSCMExtension ext : extensions) {
        try {
            client = client.combine(ext.getRequiredClient());
        } catch (GitClientConflictException e) {
            throw new RuntimeException(ext.getDescriptor().getDisplayName() + " extended Git behavior is incompatible with other behaviors");
        }
    }
    if (client == GitClientType.JGIT) return JGitTool.MAGIC_EXENAME;

    GitTool tool = resolveGitTool(listener);
    if (builtOn != null) {
        try {
            tool = tool.forNode(builtOn, listener);
        } catch (IOException e) {
            // NOTE(review): failure is only logged and the un-specialized tool
            // is used; InterruptedException is swallowed without restoring the
            // interrupt flag -- confirm this is intentional.
            listener.getLogger().println("Failed to get git executable");
        } catch (InterruptedException e) {
            listener.getLogger().println("Failed to get git executable");
        }
    }
    if (env != null) {
        tool = tool.forEnvironment(env);
    }

    return tool.getGitExe();
}

/**
 * Web-bound method to let people look up a build by their SHA1 commit.
 */
public AbstractBuild<?,?> getBySHA1(String sha1) {
    AbstractProject<?,?> p = Stapler.getCurrentRequest().findAncestorObject(AbstractProject.class);
    for (AbstractBuild b : p.getBuilds()) {
        BuildData d = b.getAction(BuildData.class);
        if (d!=null && d.lastBuild!=null) {
            Build lb = d.lastBuild;
            if (lb.isFor(sha1)) return b;
        }
    }
    return null;
}

/**
 * BuildChooserContext implementation that runs callables on the master and,
 * when serialized to a remote node, replaces itself with a remoting proxy
 * whose callables execute back on this side of the channel.
 */
/*package*/ static class BuildChooserContextImpl implements BuildChooserContext, Serializable {
    final Job project;
    final Run build;
    final EnvVars environment;

    BuildChooserContextImpl(Job project, Run build, EnvVars environment) {
        this.project = project;
        this.build = build;
        this.environment = environment;
    }

    public <T> T actOnBuild(ContextCallable<Run<?,?>, T> callable) throws IOException, InterruptedException {
        return callable.invoke(build,Hudson.MasterComputer.localChannel);
    }

    public <T> T actOnProject(ContextCallable<Job<?,?>, T> callable) throws IOException, InterruptedException {
        return callable.invoke(project, MasterComputer.localChannel);
    }

    public Run<?, ?> getBuild() {
        return build;
    }

    public EnvVars getEnvironment() {
        return environment;
    }

    // Serialization hook: export a channel proxy instead of serializing the
    // Job/Run objects themselves; calls from the remote side come back over
    // the current channel.
    private Object writeReplace() {
        return Channel.current().export(BuildChooserContext.class,new BuildChooserContext() {
            public <T> T actOnBuild(ContextCallable<Run<?,?>, T> callable) throws IOException, InterruptedException {
                return callable.invoke(build,Channel.current());
            }

            public <T> T actOnProject(ContextCallable<Job<?,?>, T> callable) throws IOException, InterruptedException {
                return callable.invoke(project,Channel.current());
            }

            public Run<?, ?> getBuild() {
                return build;
            }

            public EnvVars getEnvironment() {
                return environment;
            }
        });
    }
}

/**
 * Determines the commit to be built in this round, updating the working tree accordingly,
 * and return the information about the selected commit.
 *
 * <p>
 * For robustness, this method shouldn't assume too much about the state of the working tree when this method
 * is called. In a general case, a working tree is a left-over from the previous build, so it can be quite
 * messed up (such as HEAD pointing to a random branch.) It is expected that this method brings it back
 * to the predictable clean state by the time this method returns.
*/ private @NonNull Build determineRevisionToBuild(final Run build, final BuildData buildData, final EnvVars environment, final GitClient git, final TaskListener listener) throws IOException, InterruptedException { PrintStream log = listener.getLogger(); // every MatrixRun should build the exact same commit ID if (build instanceof MatrixRun) { MatrixBuild parentBuild = ((MatrixRun) build).getParentBuild(); if (parentBuild != null) { BuildData parentBuildData = getBuildData(parentBuild); if (parentBuildData != null) { Build lastBuild = parentBuildData.lastBuild; if (lastBuild!=null) return lastBuild; } } } // parameter forcing the commit ID to build final RevisionParameterAction rpa = build.getAction(RevisionParameterAction.class); if (rpa != null) return new Build(rpa.toRevision(git), build.getNumber(), null); final String singleBranch = environment.expand( getSingleBranch(environment) ); final BuildChooserContext context = new BuildChooserContextImpl(build.getParent(), build, environment); Collection<Revision> candidates = getBuildChooser().getCandidateRevisions( false, singleBranch, git, listener, buildData, context); if (candidates.size() == 0) { // getBuildCandidates should make the last item the last build, so a re-build // will build the last built thing. throw new AbortException("Couldn't find any revision to build. 
Verify the repository and branch configuration for this job."); } if (candidates.size() > 1) { log.println("Multiple candidate revisions"); Job<?, ?> job = build.getParent(); if (job instanceof AbstractProject) { AbstractProject project = (AbstractProject) job; if (!project.isDisabled()) { log.println("Scheduling another build to catch up with " + project.getFullDisplayName()); if (!project.scheduleBuild(0, new SCMTrigger.SCMTriggerCause())) { log.println("WARNING: multiple candidate revisions, but unable to schedule build of " + project.getFullDisplayName()); } } } } Revision rev = candidates.iterator().next(); Revision marked = rev; for (GitSCMExtension ext : extensions) { rev = ext.decorateRevisionToBuild(this,build,git,listener,rev); } return new Build(marked, rev, build.getNumber(), null); } /** * Retrieve Git objects from the specified remotes by doing the likes of clone/fetch/pull/etc. * * By the end of this method, remote refs are updated to include all the commits found in the remote servers. 
*/ private void retrieveChanges(Run build, GitClient git, TaskListener listener) throws IOException, InterruptedException { final PrintStream log = listener.getLogger(); List<RemoteConfig> repos = getParamExpandedRepos(build); if (repos.isEmpty()) return; // defensive check even though this is an invalid configuration if (git.hasGitRepo()) { // It's an update if (repos.size() == 1) log.println("Fetching changes from the remote Git repository"); else log.println(MessageFormat.format("Fetching changes from {0} remote Git repositories", repos.size())); } else { log.println("Cloning the remote Git repository"); RemoteConfig rc = repos.get(0); try { CloneCommand cmd = git.clone_().url(rc.getURIs().get(0).toPrivateString()).repositoryName(rc.getName()); for (GitSCMExtension ext : extensions) { ext.decorateCloneCommand(this, build, git, listener, cmd); } cmd.execute(); } catch (GitException ex) { ex.printStackTrace(listener.error("Error cloning remote repo '%s'", rc.getName())); throw new AbortException(); } } for (RemoteConfig remoteRepository : repos) { fetchFrom(git, listener, remoteRepository); } } @Override public void checkout(Run<?, ?> build, Launcher launcher, FilePath workspace, TaskListener listener, File changelogFile, SCMRevisionState baseline) throws IOException, InterruptedException { if (VERBOSE) listener.getLogger().println("Using strategy: " + getBuildChooser().getDisplayName()); BuildData previousBuildData = getBuildData(build.getPreviousBuild()); // read only BuildData buildData = copyBuildData(build.getPreviousBuild()); build.addAction(buildData); if (VERBOSE && buildData.lastBuild != null) { listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision); } EnvVars environment = build.getEnvironment(listener); GitClient git = createClient(listener, environment, build, workspace); for (GitSCMExtension ext : extensions) { ext.beforeCheckout(this, build, git, listener); } retrieveChanges(build, git, listener); Build revToBuild = 
determineRevisionToBuild(build, buildData, environment, git, listener); environment.put(GIT_COMMIT, revToBuild.revision.getSha1String()); Branch branch = Iterables.getFirst(revToBuild.revision.getBranches(),null); if (branch!=null) // null for a detached HEAD environment.put(GIT_BRANCH, branch.getName()); listener.getLogger().println("Checking out " + revToBuild.revision); CheckoutCommand checkoutCommand = git.checkout().branch(getParamLocalBranch(build)).ref(revToBuild.revision.getSha1String()).deleteBranchIfExist(true); for (GitSCMExtension ext : this.getExtensions()) { ext.decorateCheckoutCommand(this, build, git, listener, checkoutCommand); } try { checkoutCommand.execute(); } catch(GitLockFailedException e) { // Rethrow IOException so the retry will be able to catch it throw new IOException("Could not checkout " + revToBuild.revision.getSha1String(), e); } buildData.saveBuild(revToBuild); build.addAction(new GitTagAction(build, workspace, buildData)); if (changelogFile != null) { computeChangeLog(git, revToBuild.revision, listener, previousBuildData, new FilePath(changelogFile), new BuildChooserContextImpl(build.getParent(), build, environment)); } for (GitSCMExtension ext : extensions) { ext.onCheckoutCompleted(this, build, git,listener); } } private void computeChangeLog(GitClient git, Revision revToBuild, TaskListener listener, BuildData previousBuildData, FilePath changelogFile, BuildChooserContext context) throws IOException, InterruptedException { Writer out = new OutputStreamWriter(changelogFile.write(),"UTF-8"); boolean executed = false; ChangelogCommand changelog = git.changelog(); changelog.includes(revToBuild.getSha1()); try { boolean exclusion = false; for (Branch b : revToBuild.getBranches()) { Build lastRevWas = getBuildChooser().prevBuildForChangelog(b.getName(), previousBuildData, git, context); if (lastRevWas != null && git.isCommitInRepo(lastRevWas.getSHA1())) { changelog.excludes(lastRevWas.getSHA1()); exclusion = true; } } if (!exclusion) { 
// this is the first time we are building this branch, so there's no base line to compare against. // if we force the changelog, it'll contain all the changes in the repo, which is not what we want. listener.getLogger().println("First time build. Skipping changelog."); } else { changelog.to(out).max(MAX_CHANGELOG).execute(); executed = true; } } catch (GitException ge) { ge.printStackTrace(listener.error("Unable to retrieve changeset")); } finally { if (!executed) changelog.abort(); IOUtils.closeQuietly(out); } } public void buildEnvVars(AbstractBuild<?, ?> build, java.util.Map<String, String> env) { super.buildEnvVars(build, env); Revision rev = fixNull(getBuildData(build)).getLastBuiltRevision(); if (rev!=null) { Branch branch = Iterables.getFirst(rev.getBranches(), null); if (branch!=null) { env.put(GIT_BRANCH, branch.getName()); String prevCommit = getLastBuiltCommitOfBranch(build, branch); if (prevCommit != null) { env.put(GIT_PREVIOUS_COMMIT, prevCommit); } } env.put(GIT_COMMIT, fixEmpty(rev.getSha1String())); } if (userRemoteConfigs.size()==1){ env.put("GIT_URL", userRemoteConfigs.get(0).getUrl()); } else { int count=1; for(UserRemoteConfig config:userRemoteConfigs) { env.put("GIT_URL_"+count, config.getUrl()); count++; } } getDescriptor().populateEnvironmentVariables(env); for (GitSCMExtension ext : extensions) { ext.populateEnvironmentVariables(this, env); } } private String getLastBuiltCommitOfBranch(AbstractBuild<?, ?> build, Branch branch) { String prevCommit = null; if (build.getPreviousBuiltBuild() != null) { final Build lastBuildOfBranch = fixNull(getBuildData(build.getPreviousBuiltBuild())).getLastBuildOfBranch(branch.getName()); if (lastBuildOfBranch != null) { Revision previousRev = lastBuildOfBranch.getRevision(); if (previousRev != null) { prevCommit = previousRev.getSha1String(); } } } return prevCommit; } @Override public ChangeLogParser createChangeLogParser() { return new 
GitChangeLogParser(getExtensions().get(AuthorInChangelog.class)!=null); } @Extension public static final class DescriptorImpl extends SCMDescriptor<GitSCM> { private String gitExe; private String globalConfigName; private String globalConfigEmail; private boolean createAccountBasedOnEmail; // private GitClientType defaultClientType = GitClientType.GITCLI; public DescriptorImpl() { super(GitSCM.class, GitRepositoryBrowser.class); load(); } public String getDisplayName() { return "Git"; } @Override public boolean isApplicable(Job project) { return true; } public List<GitSCMExtensionDescriptor> getExtensionDescriptors() { return GitSCMExtensionDescriptor.all(); } public boolean showGitToolOptions() { return Jenkins.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations().length>1; } /** * Lists available toolinstallations. * @return list of available git tools */ public List<GitTool> getGitTools() { GitTool[] gitToolInstallations = Hudson.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations(); return Arrays.asList(gitToolInstallations); } public ListBoxModel doFillGitToolItems() { ListBoxModel r = new ListBoxModel(); for (GitTool git : getGitTools()) { r.add(git.getName()); } return r; } /** * Path to git executable. * @deprecated * @see GitTool */ @Deprecated public String getGitExe() { return gitExe; } /** * Global setting to be used in call to "git config user.name". */ public String getGlobalConfigName() { return fixEmptyAndTrim(globalConfigName); } public void setGlobalConfigName(String globalConfigName) { this.globalConfigName = globalConfigName; } /** * Global setting to be used in call to "git config user.email". 
*/ public String getGlobalConfigEmail() { return fixEmptyAndTrim(globalConfigEmail); } public void setGlobalConfigEmail(String globalConfigEmail) { this.globalConfigEmail = globalConfigEmail; } public boolean isCreateAccountBasedOnEmail() { return createAccountBasedOnEmail; } public void setCreateAccountBasedOnEmail(boolean createAccountBasedOnEmail) { this.createAccountBasedOnEmail = createAccountBasedOnEmail; } /** * Old configuration of git executable - exposed so that we can * migrate this setting to GitTool without deprecation warnings. */ public String getOldGitExe() { return gitExe; } /** * Determine the browser from the scmData contained in the {@link StaplerRequest}. * * @param scmData * @return browser based on request scmData */ private GitRepositoryBrowser getBrowserFromRequest(final StaplerRequest req, final JSONObject scmData) { if (scmData.containsKey("browser")) { return req.bindJSON(GitRepositoryBrowser.class, scmData.getJSONObject("browser")); } else { return null; } } public static List<RemoteConfig> createRepositoryConfigurations(String[] urls, String[] repoNames, String[] refs) throws IOException { List<RemoteConfig> remoteRepositories; Config repoConfig = new Config(); // Make up a repo config from the request parameters String[] names = repoNames; names = GitUtils.fixupNames(names, urls); for (int i = 0; i < names.length; i++) { String url = urls[i]; if (url == null) { continue; } String name = names[i]; name = name.replace(' ', '_'); if (isBlank(refs[i])) { /** * Fill in the environment variables for launching git */ public void populateEnvironmentVariables(Map<String,String> env) { String name = getGlobalConfigName(); if (name!=null) { env.put("GIT_COMMITTER_NAME", name); env.put("GIT_AUTHOR_NAME", name); } String email = getGlobalConfigEmail(); if (email!=null) { env.put("GIT_COMMITTER_EMAIL", email); env.put("GIT_AUTHOR_EMAIL", email); } } // public GitClientType getDefaultClientType() { // return defaultClientType; // public void 
setDefaultClientType(String defaultClientType) { // this.defaultClientType = GitClientType.valueOf(defaultClientType); } private static final long serialVersionUID = 1L; public boolean isDoGenerateSubmoduleConfigurations() { return this.doGenerateSubmoduleConfigurations; } @Exported public List<BranchSpec> getBranches() { return branches; } @Override public String getKey() { String name = getScmName(); if (name != null) { return name; } StringBuilder b = new StringBuilder("git"); for (RemoteConfig cfg : getRepositories()) { for (URIish uri : cfg.getURIs()) { b.append(' ').append(uri.toString()); } } return b.toString(); } /** * Use {@link PreBuildMerge}. */ @Exported @Deprecated public PreBuildMergeOptions getMergeOptions() throws FormException { return DescriptorImpl.createMergeOptions(getUserMergeOptions(), remoteRepositories); } private boolean isRelevantBuildData(BuildData bd) { for(UserRemoteConfig c : getUserRemoteConfigs()) { if(bd.hasBeenReferenced(c.getUrl())) { return true; } } return false; } /** * @deprecated */ public BuildData getBuildData(Run build, boolean clone) { return clone ? copyBuildData(build) : getBuildData(build); } /** * Like {@link #getBuildData(Run)}, but copy the data into a new object, * which is used as the first step for updating the data for the next build. */ public BuildData copyBuildData(Run build) { BuildData base = getBuildData(build); if (base==null) return new BuildData(getScmName(), getUserRemoteConfigs()); else return base.clone(); } /** * Find the build log (BuildData) recorded with the last build that completed. BuildData * may not be recorded if an exception occurs in the plugin logic. 
* * @param build * @return the last recorded build data */ public @CheckForNull BuildData getBuildData(Run build) { BuildData buildData = null; while (build != null) { List<BuildData> buildDataList = build.getActions(BuildData.class); for (BuildData bd : buildDataList) { if (bd != null && isRelevantBuildData(bd)) { buildData = bd; break; } } if (buildData != null) { break; } build = build.getPreviousBuild(); } return buildData; } /** * Given the workspace, gets the working directory, which will be the workspace * if no relative target dir is specified. Otherwise, it'll be "workspace/relativeTargetDir". * * @param workspace * @return working directory or null if workspace is null */ protected FilePath workingDirectory(Job<?,?> context, FilePath workspace, EnvVars environment, TaskListener listener) throws IOException, InterruptedException { // JENKINS-10880: workspace can be null if (workspace == null) { return null; } for (GitSCMExtension ext : extensions) { FilePath r = ext.getWorkingDirectory(this, context, workspace, environment, listener); if (r!=null) return r; } return workspace; } /** * Given a Revision "r", check whether the list of revisions "COMMITS_WE_HAVE_BUILT..r" are to be entirely excluded given the exclusion rules * * @param git GitClient object * @param r Revision object * @param listener * @return true if any exclusion files are matched, false otherwise. 
*/ private boolean isRevExcluded(GitClient git, Revision r, TaskListener listener, BuildData buildData) throws IOException, InterruptedException { try { List<String> revShow; if (buildData != null && buildData.lastBuild != null) { revShow = git.showRevision(buildData.lastBuild.revision.getSha1(), r.getSha1()); } else { revShow = git.showRevision(r.getSha1()); } revShow.add("commit "); // sentinel value int start=0, idx=0; for (String line : revShow) { if (line.startsWith("commit ") && idx!=0) { GitChangeSet change = new GitChangeSet(revShow.subList(start,idx), getExtensions().get(AuthorInChangelog.class)!=null); Boolean excludeThisCommit=null; for (GitSCMExtension ext : extensions) { excludeThisCommit = ext.isRevExcluded(this, git, change, listener, buildData); if (excludeThisCommit!=null) break; } if (excludeThisCommit==null || !excludeThisCommit) return false; // this sequence of commits have one commit that we want to build start = idx; } idx++; } assert start==revShow.size()-1; // every commit got excluded return true; } catch (GitException e) { e.printStackTrace(listener.error("Failed to determine if we want to exclude " + r.getSha1String())); return false; // for historical reason this is not considered a fatal error. } } @Initializer(after=PLUGINS_STARTED) public static void onLoaded() { DescriptorImpl desc = Jenkins.getInstance().getDescriptorByType(DescriptorImpl.class); if (desc.getOldGitExe() != null) { String exe = desc.getOldGitExe(); String defaultGit = GitTool.getDefaultInstallation().getGitExe(); if (exe.equals(defaultGit)) { return; } System.err.println("[WARNING] you're using deprecated gitexe attribute to configure git plugin. 
Use Git installations"); } } @Initializer(before=JOB_LOADED) public static void configureXtream() { Run.XSTREAM.registerConverter(new ObjectIdConverter()); Items.XSTREAM.registerConverter(new RemoteConfigConverter(Items.XSTREAM)); Items.XSTREAM.alias("org.spearce.jgit.transport.RemoteConfig", RemoteConfig.class); } private static final Logger LOGGER = Logger.getLogger(GitSCM.class.getName()); /** * Set to true to enable more logging to build's {@link TaskListener}. * Used by various classes in this package. */ public static boolean VERBOSE = Boolean.getBoolean(GitSCM.class.getName() + ".verbose"); /** * To avoid pointlessly large changelog, we'll limit the number of changes up to this. */ public static final int MAX_CHANGELOG = Integer.getInteger(GitSCM.class.getName()+".maxChangelog",1024); }
package io.druid.segment.realtime.plumber;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.metamx.common.IAE;
import com.metamx.common.ISE;
import com.metamx.common.logger.Logger;
import io.druid.data.input.InputRow;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.indexing.RealtimeTuningConfig;
import io.druid.segment.realtime.FireHydrant;
import io.druid.timeline.DataSegment;
import org.joda.time.Interval;

import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Holds the sequence of {@link FireHydrant}s for a single time {@code interval}.
 * New rows are added to the current hydrant's {@link IncrementalIndex}; {@link #swap()}
 * atomically replaces the current hydrant with a fresh empty one and hands back the old
 * one (presumably so a caller can persist it — confirm against the plumber that owns this).
 *
 * Thread-safety: mutation of {@code currHydrant}/{@code hydrants} is guarded by
 * {@code hydrantLock}; {@code currHydrant} is additionally {@code volatile} so readers
 * such as {@link #getCurrHydrant()} see the latest swap without taking the lock.
 */
public class Sink implements Iterable<FireHydrant>
{
  private static final Logger log = new Logger(Sink.class);

  // Hydrant currently accepting writes; volatile for lock-free reads.
  private volatile FireHydrant currHydrant;
  // Guards swaps of currHydrant and additions to hydrants.
  private final transient Object hydrantLock = new Object();

  private final Interval interval;
  private final DataSchema schema;
  private final RealtimeTuningConfig config;
  private final String version;
  // CopyOnWriteArrayList lets iterator() run without holding hydrantLock.
  private final CopyOnWriteArrayList<FireHydrant> hydrants = new CopyOnWriteArrayList<FireHydrant>();

  /**
   * Creates an empty sink for the given interval; immediately installs a first
   * (empty) hydrant whose minimum timestamp is the interval start.
   */
  public Sink(
      Interval interval,
      DataSchema schema,
      RealtimeTuningConfig config,
      String version
  )
  {
    this.schema = schema;
    this.config = config;
    this.interval = interval;
    this.version = version;

    makeNewCurrIndex(interval.getStartMillis(), schema);
  }

  /**
   * Re-creates a sink from previously existing hydrants (e.g. after a restart —
   * TODO confirm with callers). Each hydrant's count must equal its list position,
   * otherwise an {@link ISE} is thrown. A fresh current hydrant is appended at the end.
   */
  public Sink(
      Interval interval,
      DataSchema schema,
      RealtimeTuningConfig config,
      String version,
      List<FireHydrant> hydrants
  )
  {
    this.schema = schema;
    this.config = config;
    this.interval = interval;
    this.version = version;

    for (int i = 0; i < hydrants.size(); ++i) {
      final FireHydrant hydrant = hydrants.get(i);
      if (hydrant.getCount() != i) {
        throw new ISE("hydrant[%s] not the right count[%s]", hydrant, i);
      }
    }
    this.hydrants.addAll(hydrants);

    makeNewCurrIndex(interval.getStartMillis(), schema);
  }

  public String getVersion()
  {
    return version;
  }

  public Interval getInterval()
  {
    return interval;
  }

  public FireHydrant getCurrHydrant()
  {
    return currHydrant;
  }

  /**
   * Adds a row to the current hydrant's index.
   *
   * @return the value returned by {@link IncrementalIndex#add}, or -1 if the current
   *         hydrant's index has already been relinquished (swapped out from under us)
   * @throws IAE if no current hydrant exists at all
   */
  public int add(InputRow row)
  {
    if (currHydrant == null) {
      throw new IAE("No currHydrant but given row[%s]", row);
    }

    synchronized (hydrantLock) {
      IncrementalIndex index = currHydrant.getIndex();
      if (index == null) {
        return -1; // the hydrant was swapped without being replaced
      }
      return index.add(row);
    }
  }

  /**
   * True when only the freshly-created current hydrant exists and it holds no rows.
   * NOTE(review): currHydrant.getIndex() could be null here (see add()); that would
   * NPE — confirm callers only invoke this while the current index is live.
   */
  public boolean isEmpty()
  {
    synchronized (hydrantLock) {
      return hydrants.size() == 1 && currHydrant.getIndex().isEmpty();
    }
  }

  /**
   * If currHydrant is A, creates a new index B, sets currHydrant to B and returns A.
   *
   * @return the current index after swapping in a new one
   */
  public FireHydrant swap()
  {
    return makeNewCurrIndex(interval.getStartMillis(), schema);
  }

  /** True when the current hydrant has a live index containing at least one row. */
  public boolean swappable()
  {
    synchronized (hydrantLock) {
      return currHydrant.getIndex() != null && currHydrant.getIndex().size() != 0;
    }
  }

  /**
   * Builds the {@link DataSegment} descriptor for this sink: the schema's data source,
   * this sink's interval/version, the aggregator names as metric names, and the
   * configured shard spec. Dimensions are left empty here.
   */
  public DataSegment getSegment()
  {
    return new DataSegment(
        schema.getDataSource(),
        interval,
        version,
        ImmutableMap.<String, Object>of(),
        Lists.<String>newArrayList(),
        Lists.transform(
            Arrays.asList(schema.getAggregators()),
            new Function<AggregatorFactory, String>()
            {
              @Override
              public String apply(@Nullable AggregatorFactory input)
              {
                return input.getName();
              }
            }
        ),
        config.getShardSpec(),
        null,
        0
    );
  }

  /**
   * Creates a fresh empty IncrementalIndex from the schema, installs it (under the lock)
   * as the new current hydrant with the next hydrant count, and returns the previous
   * current hydrant (null on first call from the constructors).
   */
  private FireHydrant makeNewCurrIndex(long minTimestamp, DataSchema schema)
  {
    IncrementalIndex newIndex = new IncrementalIndex(
        new IncrementalIndexSchema.Builder()
            .withMinTimestamp(minTimestamp)
            .withQueryGranularity(schema.getGranularitySpec().getQueryGranularity())
            .withSpatialDimensions(schema.getParser())
            .withMetrics(schema.getAggregators())
            .build()
    );

    final FireHydrant old;
    synchronized (hydrantLock) {
      old = currHydrant;
      currHydrant = new FireHydrant(newIndex, hydrants.size(), getSegment().getIdentifier());
      hydrants.add(currHydrant);
    }

    return old;
  }

  /**
   * Iterates the hydrants, skipping any whose index is live but empty
   * (a null index — already persisted — is still included).
   */
  @Override
  public Iterator<FireHydrant> iterator()
  {
    return Iterators.filter(
        hydrants.iterator(),
        new Predicate<FireHydrant>()
        {
          @Override
          public boolean apply(@Nullable FireHydrant input)
          {
            final IncrementalIndex index = input.getIndex();
            return index == null || index.size() != 0;
          }
        }
    );
  }

  @Override
  public String toString()
  {
    return "Sink{" +
           "interval=" + interval +
           ", schema=" + schema +
           '}';
  }
}
package duro.reflang; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Hashtable; import java.util.Stack; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Collectors; import org.antlr.v4.runtime.ANTLRErrorListener; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Recognizer; import org.antlr.v4.runtime.atn.ATNConfigSet; import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.tree.ParseTree; import duro.debugging.Debug; import duro.reflang.antlr4_2.DuroBaseListener; import duro.reflang.antlr4_2.DuroLexer; import duro.reflang.antlr4_2.DuroListener; import duro.reflang.antlr4_2.DuroParser; import duro.reflang.antlr4_2.DuroParser.BinaryMessageContext; import duro.reflang.antlr4_2.DuroParser.BinaryOperatorContext; import duro.reflang.antlr4_2.DuroParser.IntegerContext; import duro.reflang.antlr4_2.DuroParser.MultiArgMessageArgContext; import duro.reflang.antlr4_2.DuroParser.MultiArgMessageContext; import duro.reflang.antlr4_2.DuroParser.ProgramContext; import duro.reflang.antlr4_2.DuroParser.RootExpressionContext; import duro.reflang.antlr4_2.DuroParser.StringContext; import duro.runtime.CustomProcess; import duro.runtime.Instruction; import duro.runtime.Selector; public class Compiler_NEW { private Hashtable<Selector, PrimitiveGeneratorFactory> primitiveMap = new Hashtable<Selector, PrimitiveGeneratorFactory>(); private MessageCollector errors = new MessageCollector(); private ArrayList<Runnable> endHandlers = new ArrayList<Runnable>(); public Compiler_NEW() { primitiveMap.put(Selector.get("write", 1), new PrimitiveGeneratorFactory.ConstInstruction(new 
Instruction(Instruction.OPCODE_SP_WRITE), false)); } private void appendError(ParserRuleContext ctx, String message) { errors.appendMessage(ctx.getStart().getLine(), ctx.getStart().getCharPositionInLine(), message); } private void appendError(int line, int charPositionInLine, String message) { errors.appendMessage(line, charPositionInLine, message); } private void appendErrors(MessageCollector errors) { this.errors.appendMessages(errors); } public boolean hasErrors() { return errors.hasMessages(); } public void printErrors() { errors.printMessages(); } public CustomProcess compile(InputStream sourceCode) throws IOException { CharStream charStream = new ANTLRInputStream(sourceCode); DuroLexer lexer = new DuroLexer(charStream); CommonTokenStream tokenStream = new CommonTokenStream(lexer); DuroParser parser = new DuroParser(tokenStream); parser.removeErrorListeners(); parser.addErrorListener(new ANTLRErrorListener() { @Override public void syntaxError(Recognizer<?,?> recognizer, java.lang.Object offendingSymbol, int line, int charPositionInLine, java.lang.String msg, RecognitionException e) { appendError(line, charPositionInLine, msg); } @Override public void reportContextSensitivity(Parser recognizer, DFA dfa, int startIndex, int stopIndex, int prediction, ATNConfigSet configs) { new String(); } @Override public void reportAttemptingFullContext(Parser recognizer, DFA dfa, int startIndex, int stopIndex, java.util.BitSet conflictingAlts, ATNConfigSet configs) { new String(); } @Override public void reportAmbiguity(Parser recognizer, DFA dfa, int startIndex, int stopIndex, boolean exact, java.util.BitSet ambigAlts, ATNConfigSet configs) { new String(); } }); long startParse = System.currentTimeMillis(); Debug.println(Debug.LEVEL_HIGH, "Parsing program..."); ProgramContext programCtx; parser.getInterpreter().setPredictionMode(PredictionMode.SLL); try { programCtx = parser.program(); // STAGE 1 } catch (Exception ex) { tokenStream.reset(); // rewind input stream 
parser.reset(); parser.getInterpreter().setPredictionMode(PredictionMode.LL); programCtx = parser.program(); // STAGE 2 // if we parse ok, it's LL not SLL } Debug.println(Debug.LEVEL_MEDIUM, "Parsed program."); long endParse = System.currentTimeMillis(); Debug.println(Debug.LEVEL_MEDIUM, "Parse time: " + (endParse - startParse)); long startGen = System.currentTimeMillis(); OrdinalAllocator idToParameterOrdinalMap = new OrdinalAllocator(); OrdinalAllocator idToVariableOrdinalMap = new OrdinalAllocator(); Debug.println(Debug.LEVEL_MEDIUM, "Generating program..."); BodyInfo bodyInfo = getBodyInfo(idToParameterOrdinalMap, idToVariableOrdinalMap, programCtx); idToParameterOrdinalMap.generate(); idToVariableOrdinalMap.generate(); for(Runnable handler: endHandlers) handler.run(); Debug.println(Debug.LEVEL_MEDIUM, "Generated program."); long endGen = System.currentTimeMillis(); Debug.println(Debug.LEVEL_MEDIUM, "Generate time: " + (endGen - startGen)); return new CustomProcess(idToParameterOrdinalMap.size(), bodyInfo.localCount, bodyInfo.instructions.toArray(new Instruction[bodyInfo.instructions.size()])); } private DuroListener createBodyListener( final ConditionalTreeWalker walker, OrdinalAllocator idToParameterOrdinalMap, OrdinalAllocator idToVariableOrdinalMap, final ArrayList<Instruction> instructions) { return new DuroBaseListener() { @Override public void exitProgram(ProgramContext ctx) { instructions.add(new Instruction(Instruction.OPCODE_FINISH)); } @Override public void exitRootExpression(RootExpressionContext ctx) { instructions.add(new Instruction(Instruction.OPCODE_POP)); } @Override public void exitBinaryMessage(BinaryMessageContext ctx) { String id = ctx.BIN_OP().getText(); instructions.add(new Instruction(Instruction.OPCODE_SEND, id, 1)); } private Stack<PrimitiveGenerator> primitiveGeneratorStack = new Stack<PrimitiveGenerator>(); @Override public void enterMultiArgMessage(MultiArgMessageContext ctx) { String id = ctx.ID_UNCAP().getText() + 
ctx.ID_CAP().stream().map(x -> x.getText()).collect(Collectors.joining()); int parameterCount = ctx.multiArgMessageArgs().size(); PrimitiveGeneratorFactory primitiveGeneratorFactory = primitiveMap.get(Selector.get(id, parameterCount)); if(primitiveGeneratorFactory != null) { PrimitiveGenerator primitiveGenerator = primitiveGeneratorFactory.create(ctx); primitiveGenerator.enterPrimitive(instructions); primitiveGeneratorStack.push(primitiveGenerator); } else primitiveGeneratorStack.push(null); } @Override public void exitMultiArgMessage(MultiArgMessageContext ctx) { PrimitiveGenerator primitiveGenerator = primitiveGeneratorStack.pop(); if(primitiveGenerator != null) primitiveGenerator.exitPrimitive(instructions); } @Override public void enterInteger(IntegerContext ctx) { int value = Integer.parseInt(ctx.INT().getText()); instructions.add(new Instruction(Instruction.OPCODE_LOAD_INT, value)); } @Override public void enterString(StringContext ctx) { String rawString = ctx.getText(); // Should the string enter properly prepared? // - i.e., no need for filtering the string. 
String string = extractStringLiteral(rawString); instructions.add(new Instruction(Instruction.OPCODE_LOAD_STRING, string)); } }; } private void onEnd(ArrayList<Instruction> instructions, Supplier<Instruction> instructionSup) { int index = instructions.size(); instructions.add(null); endHandlers.add(() -> { Instruction instruction = instructionSup.get(); instructions.set(index, instruction); }); } private static String extractStringLiteral(String rawString) { return rawString.substring(1, rawString.length() - 1) .replace("\\n", "\n") .replace("\\r", "\r") .replace("\\t", "\t"); } private BodyInfo getBodyInfo(OrdinalAllocator idToParameterOrdinalMap, OrdinalAllocator idToVariableOrdinalMap, ParseTree tree) { ArrayList<Instruction> instructions = new ArrayList<Instruction>(); ConditionalTreeWalker walker = new ConditionalTreeWalker(); walker.walk( createBodyListener(walker, idToParameterOrdinalMap, idToVariableOrdinalMap, instructions), tree ); int variableCount = idToVariableOrdinalMap.size(); return new BodyInfo(variableCount, instructions); } private static class BodyInfo { private final int localCount; private final ArrayList<Instruction> instructions; public BodyInfo(int ordinalCount, ArrayList<Instruction> instructions) { this.localCount = ordinalCount; this.instructions = instructions; } } }
package innovimax.mixthem;

import innovimax.mixthem.arguments.*;
import innovimax.mixthem.io.*;
import innovimax.mixthem.operation.*;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * <p>Mix files together using variety of rules.</p>
 * <p>Here are the rules:</p>
 * <ul>
 * <li> 1: will output file1</li>
 * <li> 2: will output file2</li>
 * <li> +: will output file1+file2</li>
 * <li> alt-line: will output one line of each starting with first line of file1</li>
 * <li> alt-char: will output one char of each starting with first char of file1</li>
 * <li> alt-byte: will output one byte of each starting with first char of file1</li>
 * <li> random-alt-line[#seed]: will output one line of each code randomly based on a seed for reproducability</li>
 * <li> random-alt-char[#seed]: will output one char of each code randomly based on a seed for reproducability</li>
 * <li> random-alt-byte[#seed]: will output one byte of each code randomly based on a seed for reproducability</li>
 * <li> join[#col1][#col2]: will output merging of lines that have common occurrence</li>
 * </ul>
 * @author Innovimax
 * @version 1.0
 */
public class MixThem {

    /** Application-wide logger; handlers are installed lazily by {@link #setLogging(Level)}. */
    public final static Logger LOGGER = Logger.getLogger(MixThem.class.getName());

    private final InputResource input1, input2;
    private final OutputStream out;

    /**
     * Constructor
     * @param input1 The first input resource to be mixed
     * @param input2 The second input resource to be mixed
     * @param out The output stream to write mixing result
     */
    public MixThem(InputResource input1, InputResource input2, OutputStream out) {
        this.input1 = input1;
        this.input2 = input2;
        this.out = out;
    }

    /**
     * Installs a console handler on {@link #LOGGER} the first time it is called.
     * Logging may be silenced by setting the system property {@code mixthem.logging}
     * to anything other than {@code "true"}.
     * @param level the level forwarded to the console handler when logging is enabled
     */
    static void setLogging(Level level) {
        // Guard: only configure once, even if called repeatedly.
        if (LOGGER.getHandlers().length == 0) {
            System.setProperty("java.util.logging.SimpleFormatter.format", "[%4$s] MixThem: %5$s%n");
            LOGGER.setUseParentHandlers(false);
            LOGGER.setLevel(Level.ALL);
            Handler handler = new ConsoleHandler();
            LOGGER.addHandler(handler);
            handler.setLevel(Level.OFF);
            String prop = System.getProperty("mixthem.logging");
            if (prop == null || prop.equals("true")) {
                handler.setLevel(level);
            }
        }
    }

    /**
     * Main entry.
     * @param args The command line arguments
     */
    public static void main(String[] args) {
        run(args);
    }

    /** Parses arguments, runs the mix against {@code System.out}, and reports errors via the logger. */
    private static void run(String[] args) {
        try {
            setLogging(Level.INFO);
            LOGGER.info("Started application");
            Arguments mixArgs = Arguments.checkArguments(args);
            MixThem mixThem = new MixThem(mixArgs.getFirstInput(), mixArgs.getSecondInput(), System.out);
            mixThem.process(mixArgs.getMode(), mixArgs.getRule(), mixArgs.getRuleParameters());
            LOGGER.info("Exited application with no errors");
        } catch (ArgumentException e) {
            LOGGER.severe("Exited application with errors...");
            LOGGER.severe("Files mixing can't be run due to following reason:");
            LOGGER.severe(e.getMessage());
            Arguments.printUsage();
        } catch (MixException e) {
            LOGGER.severe("Exited application with errors...");
            LOGGER.severe("Files mixing has been aborted due to following reason:");
            LOGGER.severe(e.getMessage());
        } catch (Exception e) {
            LOGGER.severe("Exited application with errors...");
            // Log the full stack trace through the logger instead of printStackTrace(),
            // so it honors the handler/level configuration like every other message.
            LOGGER.log(Level.SEVERE, "An unexpected error occurs:", e);
        }
    }

    /**
     * Mix files together using rules.
     * @param mode The mode to be used for mixing
     * @param rule The rule to be used for mixing
     * @param params The rule parameters to be used for mixing
     * @throws MixException - If any error occurs during mixing
     * @see innovimax.mixthem.Mode
     * @see innovimax.mixthem.Rule
     * @see innovimax.mixthem.RuleParam
     * @see innovimax.mixthem.ParamValue
     */
    public void process(Mode mode, Rule rule, Map<RuleParam, ParamValue> params) throws MixException {
        try {
            LOGGER.info("Started mixing for [" + mode.getName() + "] rule '" + rule.getName() + "'...");
            switch(rule) {
                case FILE_1:
                    ICopy file1Copy = CopyFactory.newInstance(mode);
                    file1Copy.processFile(this.input1, this.out);
                    break;
                case FILE_2:
                    ICopy file2Copy = CopyFactory.newInstance(mode);
                    file2Copy.processFile(this.input2, this.out);
                    break;
                case ADD:
                    ICopy fileAddCopy = CopyFactory.newInstance(mode);
                    fileAddCopy.processFile(this.input1, this.out);
                    fileAddCopy.processFile(this.input2, this.out);
                    break;
                case ALT_CHAR:
                    IOperation altCharOp = new DefaultCharAlternation(params);
                    altCharOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case ALT_BYTE:
                    IOperation altByteOp = new DefaultByteAlternation(params);
                    altByteOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case ALT_LINE:
                    IOperation altLineOp = new DefaultLineAlternation(AltMode.NORMAL, params);
                    altLineOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case RANDOM_ALT_LINE:
                    IOperation randomAltLineOp = new DefaultLineAlternation(AltMode.RANDOM, params);
                    randomAltLineOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case JOIN:
                    IOperation joinLineOp = new DefaultLineJoining(params);
                    joinLineOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case ZIP_LINE:
                    IOperation zipLineOp = new DefaultLineZipping(ZipType.LINE, params);
                    zipLineOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case ZIP_CELL:
                    IOperation zipCellOp = new DefaultLineZipping(ZipType.CELL, params);
                    zipCellOp.processFiles(this.input1, this.input2, this.out);
                    break;
                case ZIP_CHAR:
                    IOperation zipCharOp = new DefaultCharZipping(params);
                    zipCharOp.processFiles(this.input1, this.input2, this.out);
                    break; // was missing (only present in commented-out code); added defensively
                // NOTE(review): no default branch — rules not listed above are silently ignored.
                // A previous (commented-out) default printed "This rule has not been implemented
                // yet."; confirm whether silent skipping is the intended behavior.
            }
            LOGGER.info("Ended mixing for [" + mode.getName() + "] rule '" + rule.getName() + "'.");
        } catch (IOException e) {
            throw new MixException("Unexpected file error", e);
        }
    }
}
package com.educa.database;

import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;

import com.educa.entity.Exercise;

import java.util.ArrayList;

/**
 * Singleton SQLite helper for the "educa.db" database.
 *
 * <p>Tables: Aluno (student), Exercicio (exercise), Professor (teacher).
 * NOTE(review): every public accessor closes the cached database handle after use;
 * that forces a reopen on the next call and is unsafe if callers share cursors —
 * consider keeping the handle open for the helper's lifetime.</p>
 */
public class DataBase extends SQLiteOpenHelper {

    private static DataBase instance;

    private static final String COLUNA_ALUNO_ID = "ID";
    private static final String COLUNA_ALUNO_TURMA = "Turma";
    private static final String COLUNA_ALUNO_NOME = "Nome";
    private static final String TABLE_ALUNO = "Aluno";
    private static final String SQL_CREATE_ALUNO = "CREATE TABLE " + TABLE_ALUNO + "("
            + COLUNA_ALUNO_ID + " INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
            + COLUNA_ALUNO_TURMA + " INTEGER,"
            + COLUNA_ALUNO_NOME + " VARCHAR );";

    private static final String COLUNA_EXERCICIO_ID = "ID";
    private static final String COLUNA_EXERCICIO_NAME = "name";
    private static final String COLUNA_EXERCICIO_QUESTION = "question";
    private static final String COLUNA_EXERCICIO_TYPE = "type";
    private static final String COLUNA_EXERCICIO_DATE = "date";
    private static final String COLUNA_EXERCICIO_STATUS = "status";
    private static final String COLUNA_EXERCICIO_CORRECTION = "correction";
    private static final String TABLE_EXERCICIO = "Exercicio";
    private static final String SQL_CREATE_EXERCICIO = "CREATE TABLE " + TABLE_EXERCICIO + "("
            + COLUNA_EXERCICIO_ID + " INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
            + COLUNA_EXERCICIO_NAME + " VARCHAR,"
            + COLUNA_EXERCICIO_QUESTION + " VARCHAR,"
            + COLUNA_EXERCICIO_TYPE + " VARCHAR,"
            + COLUNA_EXERCICIO_DATE + " VARCHAR,"
            + COLUNA_EXERCICIO_STATUS + " VARCHAR,"
            + COLUNA_EXERCICIO_CORRECTION + " VARCHAR );";

    private static final String COLUNA_PROFESSOR_ID = "ID";
    private static final String COLUNA_PROFESSOR_TURMAS = "Turmas";
    private static final String COLUNA_PROFESSOR_NOME = "Nome";
    private static final String TABLE_PROFESSOR = "Professor";
    private static final String SQL_CREATE_PROFESSOR = "CREATE TABLE " + TABLE_PROFESSOR + "("
            + COLUNA_PROFESSOR_ID + " INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
            + COLUNA_PROFESSOR_TURMAS + " VARCHAR,"
            + COLUNA_PROFESSOR_NOME + " VARCHAR );";

    // Built from the table-name constants so names cannot drift out of sync
    // (the old PROFESSOR drop used a hard-coded, differently-cased literal).
    private static final String SQL_DELETE_ALUNO_TABLE = "DROP TABLE IF EXISTS " + TABLE_ALUNO;
    private static final String SQL_DELETE_EXERCICIO_TABLE = "DROP TABLE IF EXISTS " + TABLE_EXERCICIO;
    private static final String SQL_DELETE_PROFESSOR_TABLE = "DROP TABLE IF EXISTS " + TABLE_PROFESSOR;

    private DataBase(Context context) {
        super(context, "educa.db", null, 1);
    }

    /** Returns the process-wide helper instance, creating it on first use. */
    public static DataBase getInstance(final Context context) {
        if (instance == null) {
            instance = new DataBase(context);
        }
        return instance;
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(SQL_CREATE_ALUNO);
        db.execSQL(SQL_CREATE_EXERCICIO);
        db.execSQL(SQL_CREATE_PROFESSOR);
        // Do NOT close db here: the handle belongs to the framework, which keeps
        // using it after onCreate returns (closing it caused "already-closed" errors).
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL(SQL_DELETE_ALUNO_TABLE);
        db.execSQL(SQL_DELETE_EXERCICIO_TABLE);
        db.execSQL(SQL_DELETE_PROFESSOR_TABLE);
        onCreate(db);
        // Same as onCreate: never close the framework-owned handle here.
    }

    @Override
    public final void onDowngrade(final SQLiteDatabase dbHelper, final int oldVersion, final int newVersion) {
        onUpgrade(dbHelper, oldVersion, newVersion);
        // Same as onCreate: never close the framework-owned handle here.
    }

    /**
     * Inserts a teacher row.
     * @return the new row id, or -1 on failure (per {@link SQLiteDatabase#insert})
     */
    public long addProfessor(String nome, String turmas) {
        final SQLiteDatabase db = getWritableDatabase();
        final ContentValues values = new ContentValues();
        values.put(COLUNA_PROFESSOR_NOME, nome);
        values.put(COLUNA_PROFESSOR_TURMAS, turmas);
        long id = db.insert(TABLE_PROFESSOR, null, values);
        db.close();
        return id;
    }

    /**
     * Loads one teacher by id.
     * @return [id, turmas, nome] as strings, or an empty list when not found
     */
    public ArrayList<String> getProfessor(long id) {
        ArrayList<String> array = new ArrayList<String>();
        final SQLiteDatabase db = getWritableDatabase();
        // Parameterized selection instead of string concatenation (injection-safe).
        final Cursor cursor = db.query(TABLE_PROFESSOR, null,
                COLUNA_PROFESSOR_ID + " = ?",
                new String[] { String.valueOf(id) }, null, null, null);
        if (cursor.getCount() > 0 && cursor.moveToFirst()) {
            array.add(String.valueOf(cursor.getInt(0)));
            array.add(cursor.getString(1));
            array.add(cursor.getString(2));
        }
        cursor.close();
        db.close();
        return array;
    }

    /**
     * Loads one teacher matching both name and class list.
     * @return [id, turmas, nome] as strings, or an empty list when not found
     */
    public ArrayList<String> getProfessor(String nome, String turmas) {
        ArrayList<String> array = new ArrayList<String>();
        final SQLiteDatabase db = getWritableDatabase();
        // Parameterized selection instead of string concatenation (injection-safe).
        final Cursor cursor = db.query(TABLE_PROFESSOR, null,
                COLUNA_PROFESSOR_NOME + " = ? and " + COLUNA_PROFESSOR_TURMAS + " = ?",
                new String[] { nome, turmas }, null, null, null);
        if (cursor.getCount() > 0 && cursor.moveToFirst()) {
            array.add(String.valueOf(cursor.getInt(0)));
            array.add(cursor.getString(1));
            array.add(cursor.getString(2));
        }
        cursor.close();
        db.close();
        return array;
    }

    /**
     * Inserts an exercise row.
     * @return the new row id, or -1 on failure
     */
    public final long addExercise(Exercise exercise) {
        final SQLiteDatabase db = getWritableDatabase();
        final ContentValues values = new ContentValues();
        values.put(COLUNA_EXERCICIO_NAME, exercise.getName());
        values.put(COLUNA_EXERCICIO_QUESTION, exercise.getQuestion());
        values.put(COLUNA_EXERCICIO_TYPE, exercise.getType());
        values.put(COLUNA_EXERCICIO_DATE, exercise.getDate());
        values.put(COLUNA_EXERCICIO_STATUS, String.valueOf(exercise.getStatus()));
        values.put(COLUNA_EXERCICIO_CORRECTION, String.valueOf(exercise.getCorrection()));
        // BUG FIX: was inserting into TABLE_PROFESSOR; exercises belong in TABLE_EXERCICIO.
        long id = db.insert(TABLE_EXERCICIO, null, values);
        db.close();
        return id;
    }

    /**
     * Deletes the exercise row whose columns all match the given exercise.
     * @return the number of rows deleted
     */
    public final long removeExercise(Exercise exercise) {
        final SQLiteDatabase db = getWritableDatabase();
        String whereClause = COLUNA_EXERCICIO_NAME + " = ? and "
                + COLUNA_EXERCICIO_QUESTION + " = ? and "
                + COLUNA_EXERCICIO_TYPE + " = ? and "
                + COLUNA_EXERCICIO_DATE + " = ? and "
                + COLUNA_EXERCICIO_STATUS + " = ? and "
                + COLUNA_EXERCICIO_CORRECTION + " = ?";
        String[] whereArgs = new String[] {
                exercise.getName(), exercise.getQuestion(), exercise.getType(),
                exercise.getDate(), String.valueOf(exercise.getStatus()),
                String.valueOf(exercise.getCorrection()) };
        long id = db.delete(TABLE_EXERCICIO, whereClause, whereArgs);
        db.close();
        return id;
    }

    /**
     * Deletes the exercise row with the given id.
     * @return the number of rows deleted
     */
    public final long removeExercise(long id) {
        final SQLiteDatabase db = getWritableDatabase();
        long id_ = db.delete(TABLE_EXERCICIO, COLUNA_EXERCICIO_ID + " = ?",
                new String[] { String.valueOf(id) });
        db.close();
        return id_;
    }

    /**
     * Reads all exercise rows.
     * NOTE(review): the Exercise construction below is commented out, so this
     * currently always returns an empty list — confirm whether the Status/Correction
     * mapping is still pending or the method is dead code.
     */
    public ArrayList<Exercise> getListExercise() {
        ArrayList<Exercise> array = new ArrayList<Exercise>();
        final SQLiteDatabase db = getWritableDatabase();
        final Cursor cursor = db.query(TABLE_EXERCICIO, null, null, new String[] {}, null, null, null);
        if (cursor.getCount() > 0 && cursor.moveToFirst()) {
            for (int i = 0; i < cursor.getCount(); i++) {
                String name = cursor.getString(1), question = cursor.getString(2),
                        type = cursor.getString(3), date = cursor.getString(4);
                /*
                 * Status status = null; for (Status s : Status.values()) { if
                 * (s.getStatus().equalsIgnoreCase(cursor.getString(5))) {
                 * status = s; break; } }
                 */
                /*
                 * Correction correction = null; for (Correction c :
                 * Correction.values()) { if
                 * (c.getCorrection().equalsIgnoreCase(cursor.getString(6))) {
                 * correction = c; break; } } Exercise e = new Exercise(name,
                 * question, type, date, status, correction); array.add(e);
                 */
                cursor.moveToNext();
            }
        }
        cursor.close();
        db.close();
        return array;
    }

    /**
     * Updates the row matching the exercise's id (looked up by name).
     * @return the number of rows updated
     */
    public long updateExercise(Exercise exercise) {
        // BUG FIX: resolve the id BEFORE opening our handle — getExerciseId()
        // closes the cached database, which used to close the handle this method
        // was about to update with.
        long exerciseId = getExerciseId(exercise);
        final SQLiteDatabase db = getWritableDatabase();
        final ContentValues values = new ContentValues();
        values.put(COLUNA_EXERCICIO_NAME, exercise.getName());
        values.put(COLUNA_EXERCICIO_QUESTION, exercise.getQuestion());
        values.put(COLUNA_EXERCICIO_TYPE, exercise.getType());
        values.put(COLUNA_EXERCICIO_DATE, exercise.getDate());
        values.put(COLUNA_EXERCICIO_STATUS, String.valueOf(exercise.getStatus()));
        values.put(COLUNA_EXERCICIO_CORRECTION, String.valueOf(exercise.getCorrection()));
        long id = db.update(TABLE_EXERCICIO, values, COLUNA_EXERCICIO_ID + " = ?",
                new String[] { String.valueOf(exerciseId) });
        db.close();
        return id;
    }

    /**
     * Finds an exercise id by exact name match.
     * @return the id, or 0 when no row matches
     */
    public long getExerciseId(Exercise exercise) {
        final SQLiteDatabase db = getWritableDatabase();
        String sql = "SELECT " + COLUNA_EXERCICIO_ID + " FROM " + TABLE_EXERCICIO
                + " WHERE " + COLUNA_EXERCICIO_NAME + " = ?";
        final Cursor cursor = db.rawQuery(sql, new String[] { exercise.getName() });
        long id = cursor.getCount() > 0 && cursor.moveToFirst() ? cursor.getInt(0) : 0;
        cursor.close();
        db.close();
        return id;
    }
}
package io.kortex.aes;

import java.util.Arrays;

/**
 * Immutable value holder for the three parts of an AES encryption result:
 * the salt, the initialization vector, and the ciphertext.
 *
 * <p>All byte arrays are defensively copied on the way in and on the way out,
 * so instances cannot be mutated through shared references.</p>
 */
public class CipherBlock {

    private final byte[] salt;
    private final byte[] iv;
    private final byte[] cipherText;

    /**
     * @param salt the salt bytes (copied)
     * @param iv the initialization vector bytes (copied)
     * @param cipherText the encrypted payload bytes (copied)
     */
    public CipherBlock(byte[] salt, byte[] iv, byte[] cipherText) {
        super();
        this.salt = salt.clone();
        this.iv = iv.clone();
        this.cipherText = cipherText.clone();
    }

    /** @return a copy of the salt bytes */
    public byte[] getSalt() {
        return salt.clone();
    }

    /** @return a copy of the initialization vector bytes */
    public byte[] getIv() {
        return iv.clone();
    }

    /** @return a copy of the ciphertext bytes */
    public byte[] getCipherText() {
        return cipherText.clone();
    }

    // BUG FIX: the original used lowercase @override (does not compile) and
    // returned a constant 1, defeating hash-based collections. hashCode and a
    // matching content-based equals are now implemented together, as the
    // equals/hashCode contract requires.
    @Override
    public int hashCode() {
        int result = Arrays.hashCode(salt);
        result = 31 * result + Arrays.hashCode(iv);
        result = 31 * result + Arrays.hashCode(cipherText);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof CipherBlock)) {
            return false;
        }
        CipherBlock other = (CipherBlock) obj;
        return Arrays.equals(salt, other.salt)
                && Arrays.equals(iv, other.iv)
                && Arrays.equals(cipherText, other.cipherText);
    }
}
package fi.nls.oskari.cache;

import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.service.ServiceRuntimeException;
import fi.nls.oskari.util.ConversionHelper;
import fi.nls.oskari.util.PropertyUtil;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.exceptions.JedisConnectionException;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Manages Jedis connections using JedisPool (connection pool)
 *
 * <p>NOTE(review): most accessors swallow errors and return {@code null} while the
 * "jedis unavailable" path in {@code keys}/{@code hkeys} returns an empty set —
 * callers must handle both; confirm whether unifying on empty collections is wanted.</p>
 */
public class JedisManager {

    public static String ERROR_REDIS_COMMUNICATION_FAILURE = "redis_communication_failure";
    public static final int EXPIRY_TIME_DAY = 86400;

    private final static Logger log = LogFactory.getLogger(JedisManager.class);
    private static final JedisManager instance = new JedisManager();
    private static volatile JedisPool pool;

    private static final String KEY_REDIS_HOSTNAME = "redis.hostname";
    private static final String KEY_REDIS_PORT = "redis.port";
    private static final String KEY_REDIS_POOL_SIZE = "redis.pool.size";

    /**
     * Blocking construction of instances from other classes by making constructor private
     */
    private JedisManager() {}

    public static String getHost() {
        return PropertyUtil.get(KEY_REDIS_HOSTNAME, "localhost");
    }

    public static int getPort() {
        return ConversionHelper.getInt(PropertyUtil.get(KEY_REDIS_PORT), 6379);
    }

    public static int getPoolSize() {
        return ConversionHelper.getInt(PropertyUtil.get(KEY_REDIS_POOL_SIZE), 30);
    }

    /** Connects using host/port/pool size from configuration properties. */
    public static void connect() {
        JedisManager.connect(getPoolSize(), getHost(), getPort());
    }

    /**
     * Connects configured connection pool to a Redis server
     * NOTE(review): the pool swap below is not synchronized — concurrent connect()
     * calls could race; confirm connect() is only called from startup code.
     */
    public static void connect(final int poolSize, final String host, final int port) {
        if(pool != null) {
            log.warn("Pool already created! Connect called multiple times. Tried connecting to:", host);
            return;
        }
        final JedisPoolConfig poolConfig = new JedisPoolConfig();
        poolConfig.setTestOnBorrow(true);
        poolConfig.setTestOnReturn(true);
        poolConfig.setTestWhileIdle(true);
        poolConfig.setMaxIdle(poolSize / 2);
        poolConfig.setMinIdle(1);
        poolConfig.setTimeBetweenEvictionRunsMillis(-1);
        // (a duplicate setTestOnBorrow(true) call was removed here)
        final JedisPool oldPool = pool;
        pool = new JedisPool(poolConfig, host, port);
        log.debug("Created Redis connection pool with host", host, "port", port);
        if (null != oldPool) {
            log.debug("Closing old Jedis pool");
            oldPool.close();
        }
    }

    /** Closes the pool. Safe to call even if connect() was never invoked. */
    public static void shutdown() {
        // BUG FIX: guard against NPE when shutdown() is called without a prior connect().
        if (pool != null) {
            pool.close();
        }
    }

    /**
     * Destroys the pool
     */
    public void release() {
        pool.destroy();
    }

    /**
     * Gets Jedis connection from the pool
     * @return Jedis instance or null if unavailable
     */
    public Jedis getJedis() {
        return getJedis(false);
    }

    /**
     * Gets Jedis connection from the pool.
     * @param throwException when true, failures raise ServiceRuntimeException instead of returning null
     * @return Jedis instance, or null on failure (when not throwing)
     */
    public Jedis getJedis(boolean throwException) {
        try {
            return pool.getResource();
        } catch (Exception e) {
            log.error("Getting Jedis connection from the pool failed:", e.getMessage());
            if (e.getCause() != null) {
                log.debug(e, "Cause:", e.getCause().getMessage());
            }
            if(throwException) {
                throw new ServiceRuntimeException("Getting Jedis connection from the pool failed: " + e.getMessage(),
                        e.getCause(), ERROR_REDIS_COMMUNICATION_FAILURE);
            }
        }
        return null;
    }

    /**
     * Thread-safe String GET for Redis; throws ServiceRuntimeException on failure.
     * @param key
     * @return string
     */
    public static String getNecessary(String key) {
        return get(key, true);
    }

    /**
     * Thread-safe String GET for Redis (errors are logged and null is returned).
     * @param key
     * @return string
     */
    public static String get(String key) {
        return get(key, false);
    }

    /**
     * Thread-safe String GET for Redis
     * @param key
     * @param throwException throws new runtime exception, if any exception found
     * @return string
     */
    public static String get(String key, boolean throwException) {
        try (Jedis jedis = instance.getJedis(throwException)) {
            if (jedis == null) {
                return null;
            }
            return jedis.get(key);
        } catch (JedisConnectionException e) {
            log.error("Failed to get", key);
            if (throwException) {
                throw new ServiceRuntimeException("Failed to get " + key + " returning broken connection...: "
                        + e.getMessage(), e.getCause(), ERROR_REDIS_COMMUNICATION_FAILURE);
            }
            return null;
        } catch (Exception e) {
            log.error("Getting", key, "from Redis failed:", e.getMessage());
            if (throwException) {
                throw new ServiceRuntimeException("Getting" + key + "from Redis failed: " + e.getMessage(),
                        e.getCause(), ERROR_REDIS_COMMUNICATION_FAILURE);
            }
            return null;
        }
    }

    /**
     * Thread-safe byte[] GET for Redis
     * @param key
     * @return bytes
     */
    public static byte[] get(byte[] key) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.get(key);
        } catch(JedisConnectionException e) {
            log.error("Failed to get", key);
            log.error("Broken connection closed");
            return null;
        } catch (Exception e) {
            log.error("Getting", key, "from Redis failed:", e.getMessage());
            return null;
        }
    }

    /**
     * Thread-safe String SETEX for Redis
     * @param key
     * @param seconds expiry time in seconds
     * @param value
     * @return string reply, or null on failure
     */
    public static String setex(String key, int seconds, String value) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.setex(key, seconds, value);
        } catch(JedisConnectionException e) {
            log.error("Failed to set", key);
            return null;
        } catch (Exception e) {
            log.error("Setting", key, "to Redis failed:", e.getMessage());
            return null;
        }
    }

    /**
     * Thread-safe byte[] SETEX for Redis
     * @param key
     * @param seconds expiry time in seconds
     * @param value
     * @return string reply, or null on failure
     */
    public static String setex(byte[] key, int seconds, byte[] value) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.setex(key, seconds, value);
        } catch(JedisConnectionException e) {
            log.error("Failed to set", key);
            return null;
        } catch (Exception e) {
            log.error("Setting", key, "to Redis failed:", e.getMessage());
            return null;
        }
    }

    /**
     * Thread-safe KEYS; a trailing "*" is appended to the pattern.
     * @param pattern
     * @return keys; empty set if no connection, null on error
     */
    public static Set<String> keys(String pattern) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return Collections.emptySet();
            }
            return jedis.keys(pattern + "*");
        } catch(JedisConnectionException e) {
            log.error("Failed to run KEYS", pattern);
            return null;
        } catch (Exception e) {
            log.error("Running KEYS", pattern + "on Redis failed:", e.getMessage());
            return null;
        }
    }

    /**
     * Thread-safe String HKEYS for Redis
     * @param key
     * @return set of string; empty set if no connection, null on error
     */
    public static Set<String> hkeys(String key) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return Collections.emptySet();
            }
            return jedis.hkeys(key);
        } catch(JedisConnectionException e) {
            log.error("Failed to hkeys", key);
            return null;
        } catch (Exception e) {
            log.error("Getting HKEYS", key + "on Redis failed:", e.getMessage());
            return null;
        }
    }

    /**
     * Thread-safe String HGET for Redis
     * @param key
     * @param field
     * @return string, or null on failure
     */
    public static String hget(String key, String field) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.hget(key, field);
        } catch(JedisConnectionException e) {
            log.error("Failed to hget", key);
            return null;
        } catch (Exception e) {
            log.error("Getting HGET", key + "on Redis failed:", e.getMessage());
            return null;
        }
    }

    /**
     * Thread-safe Long HSET for Redis
     * @param key
     * @param field
     * @param value
     * @return reply, or null on failure
     */
    public static Long hset(String key, String field, String value) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.hset(key, field, value);
        } catch(JedisConnectionException e) {
            log.error("Failed to hget", key);
            return null;
        } catch (Exception e) {
            log.error("Getting", key, "failed miserably");
            return null;
        }
    }

    /**
     * Thread-safe Long DEL for Redis
     * @param keys
     * @return number of keys removed, or null on failure
     */
    public static Long del(String... keys) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.del(keys);
        } catch(JedisConnectionException e) {
            log.error("Failed to delete", keys);
            return null;
        } catch (Exception e) {
            log.error("Deleting", keys, "failed miserably");
            return null;
        }
    }

    /**
     * Thread-safe Long DEL for every key starting with the given prefix.
     * @param key prefix; "*" is appended before the KEYS scan
     * @return number of keys removed, or null on failure
     */
    public static Long delAll(String key) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            Set<String> keys = jedis.keys(key + "*");
            if(keys.size() > 0) {
                return jedis.del(keys.toArray(new String[keys.size()]));
            }
            return 0L;
        } catch(JedisConnectionException e) {
            log.error("Failed to del", key + "*");
            return null;
        } catch (Exception e) {
            log.error("Deleting", key + "* failed miserably");
            return null;
        }
    }

    /**
     * Returns length of string for a key (0 if key doesn't exist).
     * -1 means system level error.
     * @param key
     * @return length, or -1 on error
     */
    public static long getValueStringLength(String key) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return -1;
            }
            return jedis.strlen(key);
        } catch(JedisConnectionException e) {
            log.error("Failed to strlen", key);
        } catch (Exception e) {
            log.error("Getting key length", key + " failed miserably");
        }
        return -1;
    }

    /**
     * Returns the number of elements inside the list after the push operation.
     * -1 means system level error.
     * @param key
     * @param values
     * @return list length after push, or -1 on error
     */
    public static long pushToList(String key, String ...values) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return -1;
            }
            return jedis.rpush(key, values);
        } catch(JedisConnectionException e) {
            log.error("Failed to rpush", key);
        } catch (Exception e) {
            log.error("Adding to list", key + " failed miserably");
        }
        return -1;
    }

    /**
     * Removes and returns the last element from the list.
     * @param key
     * @return popped value, or null
     */
    public static String popList(String key) {
        return popList(key, false);
    }

    /**
     * Removes and returns an item from list.
     * With head is true uses the first element, with false the last element.
     * @param key the list key
     * @param head pop from the head (LPOP) instead of the tail (RPOP)
     * @return popped value, or null when the list is empty/missing or on error
     */
    public static String popList(String key, boolean head) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            String value;
            if(head) {
                value = jedis.lpop(key);
            } else {
                value = jedis.rpop(key);
            }
            // NOTE(review): Jedis normally signals a missing key with a Java null;
            // this string check maps a literal "nil" payload to null as well — confirm intended.
            if("nil".equalsIgnoreCase(value)) {
                // If the key does not exist or the list is already empty the special value 'nil' is returned.
                return null;
            }
            return value;
        } catch(JedisConnectionException e) {
            log.error("Failed to lpop", key);
        } catch (Exception e) {
            log.error("Popping from list", key + " failed miserably");
        }
        return null;
    }

    /**
     * Thread-safe PUBLISH
     * @param channel
     * @param message
     * @return number of receivers, or null on failure
     */
    public static Long publish(final String channel, final String message) {
        try (Jedis jedis = instance.getJedis()) {
            if (jedis == null) {
                return null;
            }
            return jedis.publish(channel, message);
        } catch(JedisConnectionException e) {
            log.error("Failed to publish on:", channel);
            return null;
        } catch (Exception e) {
            log.error("Publishing on:", channel, "failed miserably");
            return null;
        }
    }

    /**
     * Thread-safe SUBSCRIBE
     * @deprecated Use org.oskari.cache.JedisListener instead
     *
     * @param subscriber
     * @param channel
     */
    @Deprecated
    public static void subscribe(final JedisSubscriber subscriber, final String channel) {
        new Thread(() -> {
            // "Make sure the subscriber and publisher threads do not share the same Jedis connection."
            // A client subscribed to one or more channels should not issue commands,
            // although it can subscribe and unsubscribe to and from other channels.
            // NOTE!! create a new client for subscriptions instead of using pool to make sure clients don't conflict
            try (Jedis jedis = new Jedis(getHost(), getPort())) {
                // (dead "jedis == null" check removed: a constructor never returns null)
                log.warn("Subscribing on", channel);
                // Subscribe is a blocking action hence the thread
                // Also we don't care about pooling here since
                // the client remains blocked for subscription
                jedis.subscribe(subscriber, channel);
            } catch (Exception e) {
                log.error(e,"Subscribing on:", channel, "failed");
            }
        }).start();
    }
}
package net.querz.nbt;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.*;
import java.util.function.BiConsumer;

/**
 * An NBT compound tag: a named mapping from String keys to arbitrary {@link Tag}s,
 * analogous to a {@code Map<String, Tag<?>>} and iterable over its entries.
 */
public class CompoundTag extends Tag<Map<String, Tag<?>>> implements Iterable<Map.Entry<String, Tag<?>>> {

	public CompoundTag() {}

	@Override
	protected Map<String, Tag<?>> getEmptyValue() {
		return new HashMap<>(8);
	}

	// ----- plain Map-style delegation -----

	public int size() {
		return getValue().size();
	}

	public Tag<?> remove(String key) {
		return getValue().remove(key);
	}

	public void clear() {
		getValue().clear();
	}

	public boolean containsKey(String key) {
		return getValue().containsKey(key);
	}

	public boolean containsValue(Tag<?> value) {
		return getValue().containsValue(value);
	}

	public Collection<Tag<?>> values() {
		return getValue().values();
	}

	public Set<String> keySet() {
		return getValue().keySet();
	}

	// Wrapped so that entry values cannot be set to null through the view
	// (NonNullEntrySet is a project-local decorator).
	public Set<Map.Entry<String, Tag<?>>> entrySet() {
		return new NonNullEntrySet<>(getValue().entrySet());
	}

	@Override
	public Iterator<Map.Entry<String, Tag<?>>> iterator() {
		return entrySet().iterator();
	}

	public void forEach(BiConsumer<String, Tag<?>> action) {
		getValue().forEach(action);
	}

	// ----- typed getters -----

	/**
	 * Fetches a tag and casts it to the requested type.
	 * @return the tag, or null when the key is absent
	 * @throws ClassCastException when the stored tag is of a different type
	 */
	public <C extends Tag<?>> C get(String key, Class<C> type) {
		Tag<?> t = getValue().get(key);
		if (t != null) {
			return type.cast(t);
		}
		return null;
	}

	public Tag<?> get(String key) {
		return getValue().get(key);
	}

	public ByteTag getByteTag(String key) {
		return get(key, ByteTag.class);
	}

	public ShortTag getShortTag(String key) {
		return get(key, ShortTag.class);
	}

	public IntTag getIntTag(String key) {
		return get(key, IntTag.class);
	}

	public LongTag getLongTag(String key) {
		return get(key, LongTag.class);
	}

	public FloatTag getFloatTag(String key) {
		return get(key, FloatTag.class);
	}

	public DoubleTag getDoubleTag(String key) {
		return get(key, DoubleTag.class);
	}

	public StringTag getStringTag(String key) {
		return get(key, StringTag.class);
	}

	public ByteArrayTag getByteArrayTag(String key) {
		return get(key, ByteArrayTag.class);
	}

	public IntArrayTag getIntArrayTag(String key) {
		return get(key, IntArrayTag.class);
	}

	public LongArrayTag getLongArrayTag(String key) {
		return get(key, LongArrayTag.class);
	}

	public ListTag<?> getListTag(String key) {
		return get(key, ListTag.class);
	}

	public CompoundTag getCompoundTag(String key) {
		return get(key, CompoundTag.class);
	}

	// NBT has no boolean type; by convention a ByteTag > 0 means true.
	public boolean getBoolean(String key) {
		Tag<?> t = get(key);
		return t instanceof ByteTag && ((ByteTag) t).asByte() > 0;
	}

	// ----- primitive getters: missing keys fall back to the tag type's empty value -----

	public byte getByte(String key) {
		ByteTag t = getByteTag(key);
		return t == null ? new ByteTag().getEmptyValue() : t.asByte();
	}

	public short getShort(String key) {
		ShortTag t = getShortTag(key);
		return t == null ? new ShortTag().getEmptyValue() : t.asShort();
	}

	public int getInt(String key) {
		IntTag t = getIntTag(key);
		return t == null ? new IntTag().getEmptyValue() : t.asInt();
	}

	public long getLong(String key) {
		LongTag t = getLongTag(key);
		return t == null ? new LongTag().getEmptyValue() : t.asLong();
	}

	public float getFloat(String key) {
		FloatTag t = getFloatTag(key);
		return t == null ? new FloatTag().getEmptyValue() : t.asFloat();
	}

	public double getDouble(String key) {
		DoubleTag t = getDoubleTag(key);
		return t == null ? new DoubleTag().getEmptyValue() : t.asDouble();
	}

	public String getString(String key) {
		StringTag t = getStringTag(key);
		return t == null ? new StringTag().getEmptyValue() : t.getValue();
	}

	public byte[] getByteArray(String key) {
		ByteArrayTag t = getByteArrayTag(key);
		return t == null ? new ByteArrayTag().getEmptyValue() : t.getValue();
	}

	public int[] getIntArray(String key) {
		IntArrayTag t = getIntArrayTag(key);
		return t == null ? new IntArrayTag().getEmptyValue() : t.getValue();
	}

	public long[] getLongArray(String key) {
		LongArrayTag t = getLongArrayTag(key);
		return t == null ? new LongArrayTag().getEmptyValue() : t.getValue();
	}

	// ----- putters: checkNull (inherited, presumably rejects nulls) guards keys and values -----

	public Tag<?> put(String key, Tag<?> tag) {
		return getValue().put(checkNull(key), checkNull(tag));
	}

	public Tag<?> putBoolean(String key, boolean value) {
		return put(key, new ByteTag(value));
	}

	public Tag<?> putByte(String key, byte value) {
		return put(key, new ByteTag(value));
	}

	public Tag<?> putShort(String key, short value) {
		return put(key, new ShortTag(value));
	}

	public Tag<?> putInt(String key, int value) {
		return put(key, new IntTag(value));
	}

	public Tag<?> putLong(String key, long value) {
		return put(key, new LongTag(value));
	}

	public Tag<?> putFloat(String key, float value) {
		return put(key, new FloatTag(value));
	}

	public Tag<?> putDouble(String key, double value) {
		return put(key, new DoubleTag(value));
	}

	public Tag<?> putString(String key, String value) {
		return put(key, new StringTag(checkNull(value)));
	}

	public Tag<?> putByteArray(String key, byte[] value) {
		return put(key, new ByteArrayTag(checkNull(value)));
	}

	public Tag<?> putIntArray(String key, int[] value) {
		return put(key, new IntArrayTag(checkNull(value)));
	}

	public Tag<?> putLongArray(String key, long[] value) {
		return put(key, new LongArrayTag(checkNull(value)));
	}

	/**
	 * Writes each entry as: 1-byte tag id, UTF key, then the entry's payload,
	 * terminated by the END tag marker. Depth is incremented per nesting level.
	 */
	@Override
	public void serializeValue(DataOutputStream dos, int depth) throws IOException {
		for (Map.Entry<String, Tag<?>> e : getValue().entrySet()) {
			dos.writeByte(e.getValue().getID());
			dos.writeUTF(e.getKey());
			e.getValue().serializeValue(dos, incrementDepth(depth));
		}
		EndTag.INSTANCE.serialize(dos, depth);
	}

	/**
	 * Reads (id, name, payload) triples until the END tag id (0) is seen.
	 * The id byte is masked with 0xFF so ids above 127 are read as unsigned.
	 */
	@Override
	public void deserializeValue(DataInputStream dis, int depth) throws IOException {
		for (int id = dis.readByte() & 0xFF; id != 0; id = dis.readByte() & 0xFF) {
			Tag<?> tag = TagFactory.fromID(id);
			String name = dis.readUTF();
			tag.deserializeValue(dis, incrementDepth(depth));
			put(name, tag);
		}
	}

	// Human-readable {key:value,...} form; keys escaped without forcing quotes.
	@Override
	public String valueToString(int depth) {
		StringBuilder sb = new StringBuilder("{");
		boolean first = true;
		for (Map.Entry<String, Tag<?>> e : getValue().entrySet()) {
			sb.append(first ? "" : ",")
					.append(escapeString(e.getKey(), false)).append(":")
					.append(e.getValue().toString(incrementDepth(depth)));
			first = false;
		}
		sb.append("}");
		return sb.toString();
	}

	// SNBT-style {key:value,...} form; keys escaped in "lenient" tag-string mode.
	@Override
	public String valueToTagString(int depth) {
		StringBuilder sb = new StringBuilder("{");
		boolean first = true;
		for (Map.Entry<String, Tag<?>> e : getValue().entrySet()) {
			sb.append(first ? "" : ",")
					.append(escapeString(e.getKey(), true)).append(":")
					.append(e.getValue().valueToTagString(incrementDepth(depth)));
			first = false;
		}
		sb.append("}");
		return sb.toString();
	}

	/**
	 * Deep equality: same class (via super.equals), same size, and every entry
	 * present in the other compound with an equal value.
	 */
	@Override
	public boolean equals(Object other) {
		if (this == other) {
			return true;
		}
		if (!super.equals(other) || size() != ((CompoundTag) other).size()) {
			return false;
		}
		for (Map.Entry<String, Tag<?>> e : getValue().entrySet()) {
			Tag<?> v;
			if ((v = ((CompoundTag) other).get(e.getKey())) == null || !e.getValue().equals(v)) {
				return false;
			}
		}
		return true;
	}

	// NOTE(review): compares by entry count only, so compareTo is intentionally
	// NOT consistent with equals (two different compounds of equal size compare 0).
	@Override
	public int compareTo(Tag<Map<String, Tag<?>>> o) {
		if (!(o instanceof CompoundTag)) {
			return 0;
		}
		return Integer.compare(size(), o.getValue().size());
	}

	/** Deep copy: every contained tag is cloned, not shared. */
	@Override
	public CompoundTag clone() {
		CompoundTag copy = new CompoundTag();
		for (Map.Entry<String, Tag<?>> e : getValue().entrySet()) {
			copy.put(e.getKey(), e.getValue().clone());
		}
		return copy;
	}
}
package org.basex.tests.w3c; import static org.basex.core.Prop.*; import static org.basex.tests.w3c.QT3Constants.*; import static org.basex.util.Token.*; import java.io.*; import java.util.*; import org.basex.core.*; import org.basex.io.*; import org.basex.io.out.*; import org.basex.query.*; import org.basex.query.func.*; import org.basex.query.util.Compare.Flag; import org.basex.query.value.item.*; import org.basex.query.value.type.*; import org.basex.tests.bxapi.*; import org.basex.tests.bxapi.xdm.*; import org.basex.util.*; public final class QT3TS { /** Test suite id. */ private final String testid = "qt3ts"; /** Path to the test suite (ignored if {@code null}). */ private String basePath; /** Maximum length of result output. */ private int maxout = 2000; /** Correct results. */ private final TokenBuilder right = new TokenBuilder(); /** Wrong results. */ private final TokenBuilder wrong = new TokenBuilder(); /** Ignored tests. */ private final TokenBuilder ignore = new TokenBuilder(); /** Number of total queries. */ private int total; /** Number of tested queries. */ private int tested; /** Number of correct queries. */ private int correct; /** Number of ignored queries. */ private int ignored; /** Current base uri. */ private String base; /** Slow queries flag. */ private TreeMap<Long, String> slow; /** Query filter string. */ private String single = ""; /** Verbose flag. */ private boolean verbose; /** Error code flag. */ private boolean errors = true; /** Also print ignored files. */ private boolean ignoring; /** All flag. */ private boolean all; /** Database context. */ protected final Context ctx = new Context(); /** Global environments. */ private final ArrayList<QT3Env> genvs = new ArrayList<QT3Env>(); /** * Main method of the test class. 
* @param args command-line arguments * @throws Exception exception */ public static void main(final String[] args) throws Exception { try { new QT3TS().run(args); } catch(final IOException ex) { Util.errln(ex); System.exit(1); } } /** * Runs all tests. * @param args command-line arguments * @throws Exception exception */ private void run(final String[] args) throws Exception { ctx.mprop.set(MainProp.DBPATH, sandbox().path() + "/data"); parseArguments(args); final Performance perf = new Performance(); ctx.prop.set(Prop.CHOP, false); ctx.prop.set(Prop.INTPARSE, false); final XQuery qdoc = new XQuery("doc('" + file(null, CATALOG) + "')", ctx); final XdmValue doc = qdoc.value(); final String version = asString("*:catalog/@version", doc); Util.outln(NL + "QT3 Test Suite " + version); Util.out("Parsing queries"); /** * Runs a single test set. * @param name name of test set * @throws Exception exception */ private void testSet(final String name) throws Exception { final XQuery qdoc = new XQuery("doc(' " + file(null, name) + "')", ctx); final XdmValue doc = qdoc.value(); final XQuery qset = new XQuery("*:test-set", ctx).context(doc); final XdmValue set = qset.value(); base = IO.get(doc.getBaseURI()).dirPath(); qdoc.close(); if(supported(set)) { // parse environment of test-set final XQuery qenv = new XQuery("*:environment", ctx).context(set); final ArrayList<QT3Env> envs = new ArrayList<QT3Env>(); for(final XdmItem ienv : qenv) envs.add(new QT3Env(ctx, ienv)); qenv.close(); // run all test cases final XQuery qts = new XQuery("*:test-case", ctx).context(set); for(final XdmItem its : qts) { try { testCase(its, envs); } catch(final IOException ex) { Util.debug(ex); } } qts.close(); } qset.close(); } /** * Runs a single test case. 
* @param test node * @param envs environments * @throws Exception exception */ private void testCase(final XdmItem test, final ArrayList<QT3Env> envs) throws Exception { if(total++ % 500 == 0) Util.out("."); if(!supported(test)) { if(ignoring) ignore.add(asString("@name", test)).add(NL); ignored++; return; } // skip queries that do not match filter final String name = asString("@name", test); if(!name.startsWith(single)) { if(ignoring) ignore.add(name).add(NL); ignored++; return; } tested++; // expected result /** * Removes comments from the specified string. * @param in input string * @return result */ private String noComments(final String in) { return QueryProcessor.removeComments(in, maxout); } /** * Checks if the current query is supported. * @param node query context * @return result of check */ private boolean supported(final XdmValue node) { final XQuery q = new XQuery( /** * Returns the specified environment, or {@code null}. * @param envs environments * @param ref reference * @return environment */ private static QT3Env envs(final ArrayList<QT3Env> envs, final String ref) { for(final QT3Env e : envs) if(e.name.equals(ref)) return e; return null; } /** * Tests the result of a test case. 
* @param result resulting value * @param expected expected result * @return optional expected test suite result */ private String test(final QT3Result result, final XdmValue expected) { final String type = expected.getName(); final XdmValue value = result.value; try { String msg; if(type.equals("error")) { msg = assertError(result, expected); } else if(type.equals("all-of")) { msg = allOf(result, expected); } else if(type.equals("any-of")) { msg = anyOf(result, expected); } else if(value != null) { if(type.equals("assert")) { msg = assertQuery(value, expected); } else if(type.equals("assert-count")) { msg = assertCount(value, expected); } else if(type.equals("assert-deep-eq")) { msg = assertDeepEq(value, expected); } else if(type.equals("assert-empty")) { msg = assertEmpty(value); } else if(type.equals("assert-eq")) { msg = assertEq(value, expected); } else if(type.equals("assert-false")) { msg = assertBoolean(value, false); } else if(type.equals("assert-permutation")) { msg = assertPermutation(value, expected); } else if(type.equals("assert-xml")) { msg = assertSerialization(value, expected); } else if(type.equals("assert-serialization-error")) { msg = assertSerialError(value, expected); } else if(type.equals("assert-string-value")) { msg = assertStringValue(value, expected); } else if(type.equals("assert-true")) { msg = assertBoolean(value, true); } else if(type.equals("assert-type")) { msg = assertType(value, expected); } else { msg = "Test type not supported: " + type; } } else { msg = expected.toString(); } return msg; } catch(final Exception ex) { ex.printStackTrace(); return "Exception: " + ex.getMessage(); } } /** * Tests error. 
* @param result query result * @param expect expected result * @return optional expected test suite result */ private String assertError(final QT3Result result, final XdmValue expect) { final String exp = asString('@' + CODE, expect); if(result.exc == null) return exp; final String res = result.exc.getCode(); return !errors || exp.equals("*") || exp.equals(res) ? null : exp; } /** * Tests all-of. * @param res resulting value * @param exp expected result * @return optional expected test suite result */ private String allOf(final QT3Result res, final XdmValue exp) { final XQuery query = new XQuery("*", ctx).context(exp); try { final TokenBuilder tb = new TokenBuilder(); for(final XdmItem it : query) { final String msg = test(res, it); if(msg != null) tb.add(tb.isEmpty() ? "" : ", ").add(msg); } return tb.isEmpty() ? null : tb.toString(); } finally { query.close(); } } /** * Tests any-of. * @param res resulting value * @param exp expected result * @return optional expected test suite result */ private String anyOf(final QT3Result res, final XdmValue exp) { final XQuery query = new XQuery("*", ctx).context(exp); final TokenBuilder tb = new TokenBuilder(); try { for(final XdmItem it : query) { final String msg = test(res, it); if(msg == null) return null; tb.add(tb.isEmpty() ? "" : ", ").add(msg); } return "any of { " + tb + " }"; } finally { query.close(); } } /** * Tests assertion. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertQuery(final XdmValue value, final XdmValue expect) { final String exp = expect.getString(); final XQuery query = new XQuery(exp, ctx); try { return query.bind("result", value).value().getBoolean() ? null : exp; } catch(final XQueryException ex) { // should not occur return ex.getException().getMessage(); } finally { query.close(); } } /** * Tests count. 
* @param value resulting value * @param expect expected result * @return optional expected test suite result */ private static String assertCount(final XdmValue value, final XdmValue expect) { final long exp = expect.getInteger(); final int res = value.size(); return exp == res ? null : Util.info("% items (% found)", exp, res); } /** * Tests equality. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertEq(final XdmValue value, final XdmValue expect) { final XQuery query = new XQuery(expect.getString(), ctx); try { final XdmItem exp = query.next(); final XdmItem res = value instanceof XdmItem ? (XdmItem) value : null; return exp.equal(res) ? null : exp.toString(); } catch(final XQueryException err) { return err.getException().getMessage(); } finally { query.close(); } } /** * Tests deep equals. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertDeepEq(final XdmValue value, final XdmValue expect) { final XQuery query = new XQuery(expect.getString(), ctx); try { final XdmValue exp = query.value(); return exp.deepEqual(value) ? null : exp.toString(); } finally { query.close(); } } /** * Tests permutation. 
* @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertPermutation(final XdmValue value, final XdmValue expect) { final XQuery query = new XQuery(expect.getString(), ctx); try { // cache expected results final HashSet<String> exp = new HashSet<String>(); for(final XdmItem it : query) exp.add(it.getString()); // cache actual results final HashSet<String> res = new HashSet<String>(); for(final XdmItem it : value) res.add(it.getString()); if(exp.size() != res.size()) return Util.info("% results (found: %)", exp.size(), res.size()); for(final String s : exp.toArray(new String[exp.size()])) { if(!res.contains(s)) return Util.info("% (missing)", s); } for(final String s : res.toArray(new String[exp.size()])) { if(!exp.contains(s)) return Util.info("% (missing in expected result)", s); } return null; } finally { query.close(); } } /** * Tests the serialized result. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertSerialization(final XdmValue value, final XdmValue expect) { final String file = asString("@file", expect); final boolean norm = asBoolean("@normalize-space=('true','1')", expect); final boolean pref = asBoolean("@ignore-prefixes=('true','1')", expect); try { String exp = normNL(file.isEmpty() ? 
expect.getString() : string(new IOFile(base, file).read())); if(norm) exp = string(norm(token(exp))); final String res = normNL(asString("serialize(., map{ 'indent':='no' })", value)); if(exp.equals(res)) return null; final String r = normNL(asString( "serialize(., map{ 'indent':='no', 'omit-xml-declaration':='no' })", value)); if(exp.equals(r)) return null; // include check for comments, processing instructions and namespaces String flags = "'" + Flag.ALLNODES + "'"; if(!pref) flags += ",'" + Flag.NAMESPACES + "'"; final String query = Function.DEEP_EQUAL_OPT.args("<X>" + exp + "</X>", "<X>" + res + "</X>" , "(" + flags + ")"); return asBoolean(query, expect) ? null : exp; } catch(final IOException ex) { return Util.message(ex); } } /** * Tests a serialization error. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertSerialError(final XdmValue value, final XdmValue expect) { final String exp = asString('@' + CODE, expect); try { value.toString(); return exp; } catch(final RuntimeException qe) { final String res = qe.getMessage().replaceAll("\\[|\\].*\r?\n?.*", ""); return !errors || exp.equals("*") || exp.equals(res) ? null : Util.info("% (found: %)", exp, res); } } /** * Tests string value. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertStringValue(final XdmValue value, final XdmValue expect) { String exp = expect.getString(); // normalize space final boolean norm = asBoolean("@normalize-space=('true','1')", expect); if(norm) exp = string(norm(token(exp))); final TokenBuilder tb = new TokenBuilder(); int c = 0; for(final XdmItem it : value) { if(c != 0) tb.add(' '); tb.add(it.getString()); c++; } final String res = norm ? string(norm(tb.finish())) : tb.toString(); return exp.equals(res) ? null : exp; } /** * Tests boolean. 
* @param value resulting value * @param exp expected * @return optional expected test suite result */ private static String assertBoolean(final XdmValue value, final boolean exp) { return value.getType().eq(SeqType.BLN) && value.getBoolean() == exp ? null : Util.info(exp); } /** * Tests empty sequence. * @param value resulting value * @return optional expected test suite result */ private static String assertEmpty(final XdmValue value) { return value == XdmEmpty.EMPTY ? null : ""; } /** * Tests type. * @param value resulting value * @param expect expected result * @return optional expected test suite result */ private String assertType(final XdmValue value, final XdmValue expect) { final String exp = expect.getString(); try { final XQuery query = new XQuery("$result instance of " + exp, ctx); return query.bind("result", value).value().getBoolean() ? null : Util.info("Type '%' (found: '%')", exp, value.getType().toString()); } catch(final XQueryException ex) { // should not occur return ex.getException().getMessage(); } } /** * Returns the string representation of a query result. * @param query query string * @param value optional context value * @return optional expected test suite result */ String asString(final String query, final XdmValue value) { return XQuery.string(query, value, ctx); } /** * Returns the boolean representation of a query result. * @param query query string * @param value optional context value * @return optional expected test suite result */ boolean asBoolean(final String query, final XdmValue value) { final XQuery qp = new XQuery(query, ctx).context(value); try { final XdmItem it = qp.next(); return it != null && it.getBoolean(); } finally { qp.close(); } } /** * Returns the path to a given file. * @param b base path, possibly {@code null} * @param file file name * @return path to the file */ private String file(final String b, final String file) { final String dir = b != null ? b : basePath; return dir == null ? 
file : new File(dir, file).getAbsolutePath(); } /** * Calculates the percentage of correct queries. * @param v value * @param t total value * @return percentage */ private static String pc(final int v, final long t) { return (t == 0 ? 100 : v * 10000 / t / 100d) + "%"; } /** * Normalizes newline characters. * @param in input string * @return result */ private static String normNL(final String in) { return in.replaceAll("\r\n|\r|\n", NL); } /** * Parses the command-line arguments, specified by the user. * @param args command-line arguments * @throws IOException I/O exception */ private void parseArguments(final String[] args) throws IOException { final Args arg = new Args(args, this, " -v [pat]" + NL + " [pat] perform tests starting with a pattern" + NL + " -a save all tests" + NL + " -d debugging mode" + NL + " -e ignore error codes" + NL + " -i also save ignored files" + NL + " -p path to the test suite" + NL + " -s print slow queries" + NL + " -v verbose output", Util.info(Text.CONSOLE, Util.name(this))); while(arg.more()) { if(arg.dash()) { final char c = arg.next(); if(c == 'v') { verbose = true; } else if(c == 'a') { all = true; } else if(c == 'd') { ctx.mprop.set(MainProp.DEBUG, true); } else if(c == 'i') { ignoring = true; } else if(c == 'e') { errors = false; } else if(c == 's') { slow = new TreeMap<Long, String>(); } else if(c == 'p') { final File f = new File(arg.string()); if(!f.isDirectory()) arg.usage(); basePath = f.getCanonicalPath(); } else { arg.usage(); } } else { single = arg.string(); maxout = Integer.MAX_VALUE; } } } /** * Structure for storing XQuery results. */ static class QT3Result { /** Query result. */ XdmValue value; /** Query exception. */ XQueryException exc; /** Query error. */ Throwable error; } /** * Returns the sandbox database path. * @return database path */ private IOFile sandbox() { return new IOFile(Prop.TMP, testid); } }
package org.jboss.apiviz; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import com.sun.javadoc.RootDoc; /** * @author The APIviz Project (apiviz-dev@lists.jboss.org) * @author Trustin Lee (tlee@redhat.com) * * @version $Rev$, $Date$ * */ public class Graphviz { private static File home; public static boolean isAvailable(RootDoc root) { String executable = Graphviz.getExecutable(root); File home = Graphviz.getHome(root); ProcessBuilder pb = new ProcessBuilder(executable, "-V"); pb.redirectErrorStream(true); if (home != null) { root.printNotice("Graphviz Home: " + home); pb.directory(home); } root.printNotice("Graphviz Executable: " + executable); Process p; try { p = pb.start(); } catch (IOException e) { return false; } BufferedReader in = new BufferedReader( new InputStreamReader(p.getInputStream())); OutputStream out = p.getOutputStream(); try { out.close(); String line = null; while((line = in.readLine()) != null) { if (line.indexOf("Graphviz") >= 0) { return true; } } return false; } catch (IOException e) { return false; } finally { try { out.close(); } catch (IOException e) { // Shouldn't happen. } try { in.close(); } catch (IOException e) { // Shouldn't happen. 
} for (;;) { try { p.waitFor(); break; } catch (InterruptedException e) { // Ignore } } } } public static void writeImageAndMap( RootDoc root, String diagram, File outputDirectory, String filename) throws IOException { File pngFile = new File(outputDirectory, filename + ".png"); File mapFile = new File(outputDirectory, filename + ".map"); pngFile.delete(); mapFile.delete(); ProcessBuilder pb = new ProcessBuilder( Graphviz.getExecutable(root), "-Tcmapx", "-o", mapFile.getAbsolutePath(), "-Tpng", "-o", pngFile.getAbsolutePath()); pb.redirectErrorStream(true); File home = Graphviz.getHome(root); if (home != null) { pb.directory(home); } Process p = pb.start(); BufferedReader in = new BufferedReader( new InputStreamReader(p.getInputStream())); Writer out = new OutputStreamWriter(p.getOutputStream(), "UTF-8"); try { out.write(diagram); out.close(); String line = null; while((line = in.readLine()) != null) { System.err.println(line); } } finally { try { out.close(); } catch (IOException e) { // Shouldn't happen. } try { in.close(); } catch (IOException e) { // Shouldn't happen. } for (;;) { try { int result = p.waitFor(); if (result != 0) { throw new IllegalStateException("Graphviz exited with a non-zero return value: " + result); } break; } catch (InterruptedException e) { // Ignore } } } } private static String getExecutable(RootDoc root) { String command = "dot"; try { String osName = System.getProperty("os.name"); if (osName != null && osName.indexOf("Windows") >= 0) { File path = Graphviz.getHome(root); if (path != null) { command = path.getAbsolutePath() + File.separator + "dot.exe"; } else { command = "dot.exe"; } } } catch (Exception e) { // ignore me! 
} return command; } private static File getHome(RootDoc root) { if (home != null) { return home; } File graphvizDir = null; try { String graphvizHome = System.getProperty("graphviz.home"); if (graphvizHome != null) { root.printNotice( "Using the 'graphviz.home' system property: " + graphvizHome); } else { root.printNotice( "The 'graphviz.home' system property was not specified."); graphvizHome = System.getenv("GRAPHVIZ_HOME"); if (graphvizHome != null) { root.printNotice( "Using the 'GRAPHVIZ_HOME' environment variable: " + graphvizHome); } else { root.printNotice( "The 'GRAPHVIZ_HOME' environment variable was not specified."); } } if (graphvizHome != null) { graphvizDir = new File(graphvizHome); if (!graphvizDir.exists() || !graphvizDir.isDirectory()) { root.printWarning( "The specified graphviz home directory does not exist: " + graphvizDir.getPath()); return null; } } else { root.printNotice( "System path will be used as graphviz home directory was not specified."); } } catch (Exception e) { // ignore... } return home = graphvizDir; } private Graphviz() { // Unused } }
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.ZooKeeper.States;
import org.apache.zookeeper.data.Stat;

import com.netflix.curator.framework.CuratorFramework;
import com.netflix.curator.framework.CuratorFrameworkFactory;
import com.netflix.curator.framework.api.CuratorEvent;
import com.netflix.curator.framework.api.CuratorEventType;
import com.netflix.curator.framework.api.CuratorListener;
import com.netflix.curator.framework.listen.ListenerContainer;
import com.netflix.curator.retry.RetryNTimes;

// Benchmark harness that drives one Curator client per ZooKeeper host and
// measures request throughput for read/write/create/delete workloads.
public class curatorTest {
  int attempts;
  int _totalOps;            // number of operations submitted per test round
  int _lowerbound;          // refill threshold for outstanding operations
  Client[] _clients;        // one worker per host
  String[] _hosts;
  int _interval;            // timer period in milliseconds
  HashMap<Integer, Thread> _running;   // client id -> worker thread; doubles as completion latch
  AtomicInteger _finishedTotal;        // operations completed across all clients
  int _oldTotal;
  int _timeCounter;
  int _deadline;            // test duration measured in timer ticks
  AtomicInteger _curtotalOps;
  long _lastCpuTime;
  long _currCpuTime;
  long _startCpuTime;       // nanoTime at test start; all timings are relative to it
  int _increment;
  testStat _currentTest;
  String _data;             // payload written to znodes
  boolean _sync;            // synchronous vs. background (async) requests
  BufferedWriter _bw;       // throughput log for the current test
  CyclicBarrier _barrier;   // aligns all clients at test start

  // Workload types; CLEANING deletes leftover children, UNDEFINED is idle.
  private enum testStat{
    READ, SETSINGLE, SETMUTI, CREATE, DELETE, CLEANING, UNDEFINED
  }

  private class Client implements Runnable{
    String _host;//the host this client is connecting to
    CuratorFramework _client;//the actual client
    testStat _stat;//current test
    int _attempts;
    String _path;             // this client's private znode: /client<id>
    int _id;
    int count;                // operations issued so far
    int countTime;            // timer ticks elapsed
    Timer _timer;
    boolean _syncfin;         // set when the deadline fires in sync mode
    int _highestN;            // highest child sequence number created
    int _highestDeleted;      // highest child sequence number deleted
    BufferedWriter _records;

    int getTimeCount(){
      return countTime;
    }

    int getOpsCount(){
      return count;
    }

    Client(String host, String namespace, int attempts, int id) throws IOException {
      _host = host;
      // retry forever with a 1s pause so the benchmark survives transient outages
      _client = CuratorFrameworkFactory.builder()
        .connectString(_host).namespace(namespace)
        .retryPolicy(new RetryNTimes(Integer.MAX_VALUE,1000))
        .connectionTimeoutMs(5000).build();
      _stat = testStat.UNDEFINED;
      _attempts = attempts;
      _id = id;
      _path = "/client"+id;
      _timer = new Timer();
      _highestN = 0;
      _highestDeleted = 0;
    }

    void setStat(testStat stat){
      _stat = stat;
    }

    // Sends a ZooKeeper four-letter admin command (e.g. "srst", "stat")
    // directly over a raw socket to port 2181 and echoes the reply.
    void zkAdminCommand(String cmd) {
      String host = _host.split(":")[0];
      Socket socket = null;
      OutputStream os = null;
      InputStream is = null;
      try {
        socket = new Socket(host, 2181);
        os = socket.getOutputStream();
        is = socket.getInputStream();
        os.write(cmd.getBytes());
        os.flush();
        byte[] b = new byte[1000];
        while (is.read(b) >= 0)
          System.err.println(_id+" " + cmd + " command:\n" + new String(b));
        System.err.println(_id+" " + cmd + " command: done.");
        is.close();
        os.close();
        socket.close();
      } catch (UnknownHostException e) {
        e.printStackTrace();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }

    @Override
    public void run(){
      if(!_client.isStarted())
        _client.start();
      _syncfin = false;

      // CLEANING rounds only delete leftovers and then deregister.
      if(_stat == testStat.CLEANING){
        try {
          doClean();
        } catch (Exception e2) {
          e2.printStackTrace();
        }
        synchronized(_running){
          _running.remove(new Integer(_id));
          if(_running.size() == 0)
            _running.notify();
        }
        return;
      }

      // reset server-side request statistics before measuring
      zkAdminCommand("srst");

      try {
        _barrier.await();
      } catch (InterruptedException e1) {
        e1.printStackTrace();
      } catch (BrokenBarrierException e1) {
        e1.printStackTrace();
      }

      count = 0;
      countTime = 0;

      try{
        Stat stat = _client.checkExists().forPath(_path);
        if(stat == null){
          _client.create().forPath(_path, _data.getBytes());
        }

        //create the timer
        _timer.scheduleAtFixedRate(new TimerTask(){
          @Override
          public void run() {
            //this can be used to measure rate of each thread
            //at this moment, it is not necessary
            countTime++;
            if(countTime == _deadline){
              this.cancel();
              if(!_sync){
                // async mode: wake the waiting worker thread
                synchronized(_timer){
                  _timer.notify();
                }
              }else{
                // sync mode: the request loop polls this flag
                _syncfin = true;
              }
            }
          }
        }, _interval, _interval);

        try {
          _records = new BufferedWriter(new FileWriter(new File(_id+"-"+_stat+"_timings.dat")));
        } catch (IOException e3) {
          e3.printStackTrace();
        }

        if(_sync){
          performSync(_stat);
        }else{
          // async mode: attach a listener, seed the pipeline, then sleep
          // until the deadline timer fires
          ListenerContainer<CuratorListener> listeners =
            (ListenerContainer<CuratorListener>)_client.getCuratorListenable();
          Listener listener = new Listener(this, _stat);
          listeners.addListener(listener);
          submitAsync(_attempts, _stat);
          //blocks until awaken by timer
          synchronized(_timer){
            _timer.wait();
          }
          listeners.removeListener(listener);
        }
        /*stat = _client.checkExists().forPath(_path);
        if(stat != null){
          _client.delete().forPath(_path);
        }*/
      }catch(Exception e){
        e.printStackTrace();
      }

      // dump server-side statistics after the round
      zkAdminCommand("stat");

      try {
        _records.close();
      } catch (IOException e) {
        e.printStackTrace();
      }

      System.err.println(_id+"-i'm done, reqs:"+count);
      synchronized(_running){
        _running.remove(new Integer(_id));
        if(_running.size() == 0)
          _running.notify();
      }
    }

    // Issues requests one at a time until either the submitted budget is
    // exhausted or the deadline flag (_syncfin) is raised by the timer.
    void performSync(testStat type) throws Exception{
      for(int i = 0 ;i < _curtotalOps.get();i++){
        double time = ((double)System.nanoTime() - _startCpuTime)/1000000000.0;
        switch(type){
          case READ:
            _client.getData().forPath(_path);
            break;
          case SETSINGLE:
            _client.setData().forPath(_path,new String(_data+i).getBytes());
            break;
          case SETMUTI:
            try{
              _client.setData().forPath(_path+"/"+(count%_highestN),new String(_data+i).getBytes());
            }catch(NoNodeException e){
            }
            break;
          case CREATE:
            _client.create().forPath(_path+"/"+count,new String(_data+i).getBytes());
            _highestN++;
            break;
          case DELETE:
            try{
              _client.delete().forPath(_path+"/"+count);
            }catch(NoNodeException e){
            }
        }
        recordTimes(new Double(time), _records);
        count ++;
        _finishedTotal.incrementAndGet();
        if(_syncfin)
          break;
      }
    }

    // Submits n background requests; the submit time (seconds since test
    // start) travels as the request context so latency can be computed in
    // the listener callback.
    void submitAsync(int n, testStat type) throws Exception {
      for (int i = 0; i < n; i++) {
        double time = ((double)System.nanoTime() - _startCpuTime)/1000000000.0;
        switch(type){
          case READ:
            _client.getData().inBackground(new Double(time)).forPath(_path);
            break;
          case SETSINGLE:
            _client.setData().inBackground(new Double(time)).forPath(_path,
                new String(_data+i).getBytes());
            break;
          case SETMUTI:
            _client.setData().inBackground(new Double(time)).forPath(_path+"/"+(count%_highestN),
                new String(_data).getBytes());
            break;
          case CREATE:
            _client.create().inBackground(new Double(time)).forPath(_path+"/"+count,
                new String(_data).getBytes());
            _highestN++;
            break;
          case DELETE:
            _client.delete().inBackground(new Double(time)).forPath(_path+"/"+count);
            _highestDeleted++;
            // everything created so far has been deleted: finish this round early
            if(_highestDeleted >= _highestN){
              zkAdminCommand("stat");
              synchronized(_running){
                _running.remove(new Integer(_id));
                if(_running.size() == 0)
                  _running.notify();
              }
              _timer.cancel();
              count++;
              return;
            }
        }
        count++;
      }
    }

    // Repeatedly issues background deletes for all children until none remain.
    void doClean() throws Exception{
      List<String> children;
      do{
        children = _client.getChildren().forPath(_path);
        for(String child : children){
          _client.delete().inBackground().forPath(_path+"/"+child);
        }
        Thread.sleep(2000);
      }while(children.size()!=0);
    }

    // Completion callback: records latency and immediately submits a
    // replacement request, keeping the async pipeline full.
    private class Listener implements CuratorListener{
      Client _client;  // client listener listens for
      testStat _stat;//current test

      Listener(Client client, testStat stat) {
        _client = client;
        _stat = stat;
      }

      @Override
      public void eventReceived(CuratorFramework arg0, CuratorEvent arg1) throws Exception {
        CuratorEventType type = arg1.getType();
        // Ensure that the event we received applies to current test
        if ((type == CuratorEventType.GET_DATA && _currentTest == testStat.READ)
            || (type == CuratorEventType.SET_DATA && _currentTest == testStat.SETMUTI)
            || (type == CuratorEventType.SET_DATA && _currentTest == testStat.SETSINGLE)
            || (type == CuratorEventType.DELETE && _currentTest == testStat.DELETE)
            || (type == CuratorEventType.CREATE && _currentTest == testStat.CREATE)) {
          _finishedTotal.incrementAndGet();
          recordEvent(arg1, _records);
          _client.submitAsync(1, _stat);
        }
        /*byte[] d = arg1.getData() ;
        String a = new String(d);
        System.out.println(">"+a+"<");*/
      }
    }
  }
void recordTimes(Double firstTime, BufferedWriter bw) throws IOException{ double newtime = ((double)System.nanoTime() - _startCpuTime)/1000000000.0; String newTimeStr = Double.toString(newtime); bw.write(firstTime.toString()+" "+newTimeStr+"\n"); } void recordEvent(CuratorEvent arg1, BufferedWriter bw) throws IOException{ Double oldctx = (Double)arg1.getContext(); recordTimes(oldctx, bw); } double getTime(){ /*return the max time consumed by each thread*/ double ret = 0; for(int i = 0;i<_clients.length;i++){ if(ret < _clients[i].getTimeCount()) ret = _clients[i].getTimeCount(); } return (ret * _interval)/1000.0; } int getTotalOps(){ /*return the total number of reqs done by all threads*/ int ret = 0; for(int i = 0;i<_clients.length;i++){ ret += _clients[i].getOpsCount(); } return ret; } public void doTest(testStat stat) throws InterruptedException{ _finishedTotal = new AtomicInteger(0); _oldTotal = 0; _curtotalOps = new AtomicInteger(_totalOps); try{ _bw = new BufferedWriter(new FileWriter(new File(stat+".dat"))); }catch(IOException e){ e.printStackTrace(); } _startCpuTime = System.nanoTime(); _lastCpuTime = _startCpuTime; for(int i = 0;i<_hosts.length;i++){ _clients[i].setStat(stat); Thread tmp = new Thread(_clients[i]); _running.put(new Integer(i), tmp); tmp.start(); } _currentTest = stat; try { _barrier.await(); } catch (BrokenBarrierException e2) { e2.printStackTrace(); } Timer timer = new Timer(); timer.scheduleAtFixedRate(new TimerTask(){ @Override public void run() { _timeCounter ++; if(_currentTest != testStat.UNDEFINED){ int finished = _finishedTotal.get(); if(finished == 0){ //this means even the first batch of operations haven't return; } /*if(_startCpuTime == 0){ _startCpuTime = System.nanoTime(); _lastCpuTime = _startCpuTime; _currCpuTime = _startCpuTime; }*/ //System.err.println("increment:"+(finished - _oldTotal)); _currCpuTime = System.nanoTime(); String msg = _currentTest+" "+((double)(_currCpuTime - _startCpuTime)/1000000000.0)+" " 
+((double)(finished - _oldTotal)/((double)(_currCpuTime - _lastCpuTime)/1000000000.0)); // System.out.println(msg); _lastCpuTime = _currCpuTime; if(_bw != null){ try { if (finished - _oldTotal > 0) { _bw.write(msg+"\n"); } } catch (IOException e1) { e1.printStackTrace(); } } _oldTotal = finished; if(_curtotalOps.get() - finished <= _lowerbound){ _increment = _totalOps - (_curtotalOps.get() - finished); try{ int avg = _increment / _clients.length; if(!_sync){ /* for(int i = 0;i<_clients.length;i++){ _clients[i].submitAsync(avg, _currentTest); } _curtotalOps.getAndAdd(_increment); */ }else{ _curtotalOps.getAndAdd(10000); } }catch(Exception e){ e.printStackTrace(); } } } } }, _interval, _interval); synchronized(_running){ _running.wait(); } _currentTest = testStat.UNDEFINED; timer.cancel(); try { _bw.close(); } catch (IOException e) { e.printStackTrace(); } double time = getTime(); System.err.println(stat+" finished, time elapsed(sec):"+time +" operations:"+_finishedTotal.get()+" avg rate:"+_finishedTotal.get()/time); } public void launch(int totaltime, boolean sync) throws InterruptedException{ _timeCounter = 0; _finishedTotal = new AtomicInteger(0); _oldTotal = 0; _deadline = totaltime / _interval; _sync = sync; /*this is where all tests start*/ /*Read requests done done by zookeeper extremely * fast compared with write requests. If the time * interval and threshold are not chosen appropriately, * it could happen that when the timer awakes, all requests * have already been finished. In this case, the output * of read test doesn't reflect the actual rate of * read requests. */ doTest(testStat.READ); doTest(testStat.READ); doTest(testStat.SETSINGLE); doTest(testStat.CREATE); doTest(testStat.SETMUTI); /*In the test, node creation and deletion tests are * done by creating a lot of nodes at first and then * deleting them. Since both of these two tests run * for a certain time, there is no guarantee that which * requests is more than the other. 
If there are more * delete requests than create requests, the extra delete * requests would end up not actually deleting anything. * Though these requests are sent and processed by * zookeeper server anyway, this could still be an issue.*/ doTest(testStat.DELETE); System.err.println("tests done cleaning"); for(int i = 0;i<_hosts.length;i++){ _clients[i].setStat(testStat.CLEANING); Thread tmp = new Thread(_clients[i]); _running.put(new Integer(i), tmp); tmp.start(); } synchronized(_running){ _running.wait(); } System.err.println("all finished"); } public curatorTest(String[] hs, int interval, int ops, int lowerbound) throws IOException{ /* * ops here represents the number of total number of ops submitted to server * say 10000, then if it falls below 2000, submit another 8000 to reach 10000 * */ _totalOps = ops; _lowerbound = lowerbound; _hosts = hs; _clients = new Client[hs.length]; _interval = interval; int avgOps = ops/hs.length; for(int i = 0;i<hs.length;i++){ _clients[i] = new Client(hs[i], "/zkTest", avgOps, i); } _running = new HashMap<Integer,Thread>(); _deadline = 0; _increment = ops - lowerbound; _data = "!!!!!"; for(int i = 0;i<19;i++){ _data += "!!!!!"; } _barrier = new CyclicBarrier(_clients.length+1); System.err.println(_barrier.getParties()); } /* * args[0] is the interval * args[1] is the total number of requests, say 16000 requests are submitted, and * whenever it is below 4000, submit another 16000 - 4000 requests, here args[1] = 16000 * args[2] is the threshold, it is 4000 in above example * args[3] is how much time you want to run the test, in millisecond * args[4] is syn or async test, 0 for async, non-0 for sync */ public static void main(String[] args) throws Exception{ if(args.length != 5){ System.out.println("wrong parameters"); } String[] hosts = new String[5]; hosts[0] = "host1.pane.cs.brown.edu:2181"; hosts[1] = "host2.pane.cs.brown.edu:2181"; hosts[2] = "host3.pane.cs.brown.edu:2181"; hosts[3] = "host4.pane.cs.brown.edu:2181"; hosts[4] = 
"host5.pane.cs.brown.edu:2181"; /* hosts[0] = "euc03.cs.brown.edu:2181"; hosts[1] = "euc04.cs.brown.edu:2181"; hosts[2] = "euc05.cs.brown.edu:2181"; hosts[3] = "euc06.cs.brown.edu:2181"; hosts[4] = "euc07.cs.brown.edu:2181"; */ int interval = Integer.parseInt(args[0]); int totalnumber = Integer.parseInt(args[1]); int threshold = Integer.parseInt(args[2]); int time = Integer.parseInt(args[3]); boolean sync = Integer.parseInt(args[4]) == 0 ? false : true; /*String[] hosts = new String[1]; hosts[0] = "localhost:2181";*/ System.err.println(interval+" "+totalnumber+" "+threshold+" "+time+" "+sync); curatorTest test = new curatorTest(hosts, interval, totalnumber, threshold); test.launch(time, sync); System.exit(0); } }
package com.jme3.font;

import java.util.LinkedList;

import com.jme3.font.BitmapFont.Align;
import com.jme3.font.BitmapFont.VAlign;
import com.jme3.font.ColorTags.Range;
import com.jme3.math.ColorRGBA;

/**
 * Manages and aligns the doubly-linked list of LetterQuads that renders a
 * StringBlock: builds the list from text, performs line wrapping, alignment,
 * and per-range color/style application.
 *
 * @author YongHoon
 */
class Letters {
    private final LetterQuad head;   // sentinel before the first real quad
    private final LetterQuad tail;   // sentinel after the last real quad
    private final BitmapFont font;
    private LetterQuad current;      // iteration cursor used by the getters below
    private StringBlock block;
    private float totalWidth;        // cached extents; -1 means "needs recompute"
    private float totalHeight;
    private ColorTags colorTags = new ColorTags();

    Letters(BitmapFont font, StringBlock bound, boolean rightToLeft) {
        final String text = bound.getText();
        this.block = bound;
        this.font = font;
        head = new LetterQuad(font, rightToLeft);
        tail = new LetterQuad(font, rightToLeft);
        setText(text);
    }

    /**
     * Rebuilds the quad list from the given text: strips color tags, links one
     * LetterQuad per plain character between the sentinels, then applies the
     * tag colors to their character ranges and invalidates the layout.
     */
    void setText(final String text) {
        colorTags.setText(text);
        String plainText = colorTags.getPlainText();
        head.setNext(tail);
        tail.setPrevious(head);
        current = head;
        if (text != null && plainText.length() > 0) {
            LetterQuad l = head;
            for (int i = 0; i < plainText.length(); i++) {
                l = l.addNextCharacter(plainText.charAt(i));
            }
        }
        LinkedList<Range> ranges = colorTags.getTags();
        if (!ranges.isEmpty()) {
            // each tag colors the span up to the start of the next tag;
            // the last tag runs to the end of the text
            for (int i = 0; i < ranges.size()-1; i++) {
                Range start = ranges.get(i);
                Range end = ranges.get(i+1);
                setColor(start.start, end.start, start.color);
            }
            Range end = ranges.getLast();
            setColor(end.start, plainText.length(), end.color);
        }
        invalidate();
    }

    LetterQuad getHead() {
        return head;
    }

    LetterQuad getTail() {
        return tail;
    }

    /**
     * Recomputes the layout of all invalid quads, applying the block's line
     * wrap mode (Character, Word or NoWrap-with-ellipsis), then aligns the
     * result and publishes the line count to the block.
     */
    void update() {
        LetterQuad l = head;
        int lineCount = 1;
        BitmapCharacter ellipsis = font.getCharSet().getCharacter(block.getEllipsisChar());
        float ellipsisWidth = ellipsis!=null? ellipsis.getWidth()*getScale(): 0;
        while (!l.isTail()) {
            if (l.isInvalid()) {
                l.update(block);
                if (l.isInvalid(block)) { // still overflows the text box
                    switch (block.getLineWrapMode()) {
                    case Character:
                        lineWrap(l);
                        lineCount++;
                        break;
                    case Word:
                        if (!l.isBlank()) {
                            // search last blank character before this word
                            LetterQuad blank = l;
                            while (!blank.isBlank()) {
                                if (blank.isLineStart() || blank.isHead()) {
                                    // the word fills the whole line:
                                    // fall back to character wrapping
                                    lineWrap(l);
                                    lineCount++;
                                    blank = null;
                                    break;
                                }
                                blank = blank.getPrevious();
                            }
                            if (blank != null) {
                                // break at the blank and re-lay-out the word
                                blank.setEndOfLine();
                                lineCount++;
                                while (blank != l) {
                                    blank = blank.getNext();
                                    blank.invalidate();
                                    blank.update(block);
                                }
                            }
                        }
                        break;
                    case NoWrap:
                        // walk back to the last quad that still fits
                        // (leaving room for the ellipsis character)
                        LetterQuad cursor = l.getPrevious();
                        while (cursor.isInvalid(block, ellipsisWidth) && !cursor.isLineStart()) {
                            cursor = cursor.getPrevious();
                        }
                        cursor.setBitmapChar(ellipsis);
                        cursor.update(block);
                        cursor = cursor.getNext();
                        // blank out the rest of the line after the ellipsis
                        while (!cursor.isTail() && !cursor.isLineFeed()) {
                            cursor.setBitmapChar(null);
                            cursor.update(block);
                            cursor = cursor.getNext();
                        }
                        break;
                    }
                }
            } else if (current.isInvalid(block)) {
                invalidate(current);
            }
            if (l.isEndOfLine()) {
                lineCount++;
            }
            l = l.getNext();
        }
        align();
        block.setLineCount(lineCount);
        rewind();
    }

    /**
     * Applies horizontal and vertical alignment offsets to every quad,
     * walking backwards from the tail so each line's width is known when its
     * quads are visited.  No-op without a text box or for Left/Top.
     */
    private void align() {
        final Align alignment = block.getAlignment();
        final VAlign valignment = block.getVerticalAlignment();
        if (block.getTextBox() == null || (alignment == Align.Left && valignment == VAlign.Top))
            return;
        LetterQuad cursor = tail.getPrevious();
        cursor.setEndOfLine();
        final float width = block.getTextBox().width;
        final float height = block.getTextBox().height;
        float lineWidth = 0;
        float gapX = 0;
        float gapY = 0;
        validateSize();
        if (totalHeight < height) { // align vertically only for no overflow
            switch (valignment) {
            case Top:
                gapY = 0;
                break;
            case Center:
                gapY = (height-totalHeight)*0.5f;
                break;
            case Bottom:
                gapY = height-totalHeight;
                break;
            }
        }
        while (!cursor.isHead()) {
            if (cursor.isEndOfLine()) {
                lineWidth = cursor.getX1()-block.getTextBox().x;
                if (alignment == Align.Center) {
                    gapX = (width-lineWidth)/2;
                } else if (alignment == Align.Right) {
                    gapX = width-lineWidth;
                } else {
                    gapX = 0;
                }
            }
            cursor.setAlignment(gapX, gapY);
            cursor = cursor.getPrevious();
        }
    }

    /** Breaks the line before quad l (unless it is the head or a blank). */
    private void lineWrap(LetterQuad l) {
        if (l.isHead() || l.isBlank())
            return;
        l.getPrevious().setEndOfLine();
        l.invalidate();
        l.update(block); // TODO: update from l
    }

    float getCharacterX0() {
        return current.getX0();
    }

    float getCharacterY0() {
        return current.getY0();
    }

    float getCharacterX1() {
        return current.getX1();
    }

    float getCharacterY1() {
        return current.getY1();
    }

    float getCharacterAlignX() {
        return current.getAlignX();
    }

    float getCharacterAlignY() {
        return current.getAlignY();
    }

    float getCharacterWidth() {
        return current.getWidth();
    }

    float getCharacterHeight() {
        return current.getHeight();
    }

    /** Advances the iteration cursor; returns false once the tail is reached. */
    public boolean nextCharacter() {
        if (current.isTail())
            return false;
        current = current.getNext();
        return true;
    }

    public int getCharacterSetPage() {
        return current.getBitmapChar().getPage();
    }

    /** @return the quad the iteration cursor currently points at */
    public LetterQuad getQuad() {
        return current;
    }

    /** Resets the iteration cursor to the head sentinel. */
    public void rewind() {
        current = head;
    }

    public void invalidate() {
        invalidate(head);
    }

    /**
     * Marks every quad from {@code cursor} to the first already-invalid quad
     * (or the tail) as needing re-layout, and drops the cached extents.
     */
    public void invalidate(LetterQuad cursor) {
        totalWidth = -1;
        totalHeight = -1;
        while (!cursor.isTail() && !cursor.isInvalid()) {
            cursor.invalidate();
            cursor = cursor.getNext();
        }
    }

    /** @return render scale: requested block size over the charset's native size */
    float getScale() {
        return block.getSize() / font.getCharSet().getRenderedSize();
    }

    public boolean isPrintable() {
        return current.getBitmapChar() != null;
    }

    float getTotalWidth() {
        validateSize();
        return totalWidth;
    }

    float getTotalHeight() {
        validateSize();
        return totalHeight;
    }

    /**
     * Recomputes the cached extents if they were invalidated (totalWidth < 0).
     * NOTE(review): the height sample is taken from l.getNext() after the
     * advance, so width and height are read from different quads per
     * iteration — looks intentional in upstream jME3 but worth confirming.
     */
    void validateSize() {
        if (totalWidth < 0) {
            LetterQuad l = head;
            while (!l.isTail()) {
                totalWidth = Math.max(totalWidth, l.getX1());
                l = l.getNext();
                totalHeight = Math.max(totalHeight, -l.getY1());
            }
        }
    }

    /**
     * Applies a style to every quad in the index range.
     *
     * @param start start index to set style. inclusive.
     * @param end end index to set style. EXCLUSIVE.
     * @param style style value forwarded to LetterQuad.setStyle
     */
    void setStyle(int start, int end, int style) {
        LetterQuad cursor = head.getNext();
        while (!cursor.isTail()) {
            if (cursor.getIndex() >= start && cursor.getIndex() < end) {
                cursor.setStyle(style);
            }
            cursor = cursor.getNext();
        }
    }

    /**
     * Applies a color to every quad in the index range.
     *
     * @param start start index to set color. inclusive.
     * @param end end index to set color. EXCLUSIVE.
     * @param color color applied to each quad in range
     */
    void setColor(int start, int end, ColorRGBA color) {
        LetterQuad cursor = head.getNext();
        while (!cursor.isTail()) {
            if (cursor.getIndex() >= start && cursor.getIndex() < end) {
                cursor.setColor(color);
            }
            cursor = cursor.getNext();
        }
    }
}
package ru.ifmo.ctddev.gmwcs;

import ilog.concert.IloException;
import joptsimple.OptionException;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.jgrapht.UndirectedGraph;
import ru.ifmo.ctddev.gmwcs.graph.*;

import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.Collections;
import java.util.List;

import static java.util.Arrays.asList;

/**
 * Command-line entry point: parses options, reads the node/edge files,
 * runs the GMWCS solver and writes the selected units back out.
 */
public class Main {
    /**
     * Parses command-line options. A first lenient pass (unrecognized options
     * allowed, only -h/--help declared) lets the user ask for help without
     * tripping the required-argument checks; the strict second pass then
     * enforces the full option set.
     */
    public static OptionSet parseArgs(String args[]) throws IOException {
        OptionParser parser = new OptionParser();
        parser.allowsUnrecognizedOptions();
        parser.acceptsAll(asList("h", "help"), "Print a short help message");
        OptionSet parsed = parser.parse(args);
        // declare the real options (order matters: it drives help output)
        parser.acceptsAll(asList("n", "nodes"), "Node list file").withRequiredArg().required();
        parser.acceptsAll(asList("e", "edges"), "Edge list file").withRequiredArg().required();
        parser.acceptsAll(asList("m", "threads"), "Number of threads").withRequiredArg()
                .ofType(Integer.class).defaultsTo(1);
        parser.acceptsAll(asList("t", "timelimit"), "Time limit in seconds, 0 - unlimited").withRequiredArg()
                .ofType(Integer.class).defaultsTo(0);
        parser.acceptsAll(Collections.singletonList("b"), "Break symmetries");
        parser.acceptsAll(Collections.singletonList("f"),
                "Fraction of time allocated for the biggest bicomponent in each component")
                .withRequiredArg().ofType(Double.class).defaultsTo(0.8);
        parser.acceptsAll(asList("s", "silence"), "print only short description if size of bicomponent less " +
                "than <silence>")
                .withRequiredArg().ofType(Integer.class).defaultsTo(50);
        if (parsed.has("h")) {
            parser.printHelpOn(System.out);
            System.exit(0);
        }
        try {
            parsed = parser.parse(args);
        } catch (OptionException e) {
            System.err.println(e.getMessage());
            System.err.println();
            parser.printHelpOn(System.err);
            System.exit(1);
        }
        return parsed;
    }

    public static void main(String[] args) {
        OptionSet opts = null;
        try {
            opts = parseArgs(args);
        } catch (IOException e) {
            // We can't say anything. Error occurred while printing to stderr.
            System.exit(2);
        }
        int tl = (Integer) opts.valueOf("timelimit");
        int threadNum = (Integer) opts.valueOf("threads");
        int silence = (Integer) opts.valueOf("silence");
        double mainFraction = (Double) opts.valueOf("f");
        File nodeFile = new File((String) opts.valueOf("nodes"));
        File edgeFile = new File((String) opts.valueOf("edges"));
        Solver solver = new RLTSolver(opts.has("b"), silence);
        // results are written next to the inputs with a ".out" suffix
        GraphIO graphIO = new SimpleIO(nodeFile, new File(nodeFile.toString() + ".out"),
                edgeFile, new File(edgeFile.toString() + ".out"));
        try {
            UndirectedGraph<Node, Edge> graph = graphIO.read();
            double timeLimit = tl <= 0 ? Double.POSITIVE_INFINITY : tl;
            List<Unit> units = solver.solve(graph, threadNum, timeLimit, mainFraction);
            graphIO.write(units);
        } catch (ParseException e) {
            System.err.println("Couldn't parse input files: " + e.getMessage() + " " + e.getErrorOffset());
        } catch (IloException e) {
            System.err.println("CPLEX error:" + e.getMessage());
        } catch (IOException e) {
            System.err.println("Error occurred while reading/writing input/output files");
        }
    }
}
package se.kth.bbc.lims;

/**
 * Deployment constants; each value switches between a local dev machine and
 * the SNURRAN server based on the {@link #server} flag.
 *
 * @author stig
 */
public class Constants {

    /** Deployment target, "LOCAL" or "SNURRAN"; flip the comments to switch. */
    //public static final String server = "LOCAL";
    public static final String server = "SNURRAN";

    // FIX: the LOCAL check was repeated in every constant; computed once here.
    private static final boolean LOCAL = server.equals("LOCAL");

    /** Directory uploaded files are written to. */
    public static final String UPLOAD_DIR = LOCAL ? "/home/stig/tst" : "/tmp";

    /** HDFS namenode endpoint. */
    public static final String NAMENODE_URI =
            LOCAL ? "hdfs://localhost:8020" : "hdfs://snurran.sics.se:9999";

    /** Local staging directory for the application master (presumably YARN — confirm). */
    public static final String LOCAL_APPMASTER_DIR =
            LOCAL ? "/home/stig/tst/appMaster" : "/tmp/appMaster";

    /** Local staging directory for extra files shipped alongside a job. */
    public static final String LOCAL_EXTRA_DIR =
            LOCAL ? "/home/stig/tst/extraFiles" : "/tmp/extraFiles";

    /** Constants holder; not instantiable. */
    private Constants() {
    }
}
package seedu.address.ui; import javafx.fxml.FXML; import javafx.scene.Node; import javafx.scene.control.Label; import javafx.scene.layout.HBox; import javafx.scene.paint.Color; import seedu.address.model.task.ReadOnlyTask; public class TaskCard extends UiPart{ private static final String FXML = "TaskListCard.fxml"; @FXML private HBox cardPane; @FXML private Label detail; @FXML private Label id; @FXML private Label dbd; @FXML private Label dbt; @FXML private Label priority; @FXML private Label tags; private ReadOnlyTask task; private int displayedIndex; public TaskCard(){ } public static TaskCard load(ReadOnlyTask task, int displayedIndex){ TaskCard card = new TaskCard(); card.task = task; card.displayedIndex = displayedIndex; return UiPartLoader.loadUiPart(card); } @FXML public void initialize() { detail.setText(task.getDetail().details); id.setText(displayedIndex + ". "); dbd.setText(task.getDueByDate().getFriendlyString()); dbt.setText(task.getDueByTime().getFriendlyString()); priority.setText(task.getPriority().value); tags.setText(task.tagsString()); tags.setTextFill(Color.GOLD); } public HBox getLayout() { return cardPane; } @Override public void setNode(Node node) { cardPane = (HBox)node; } @Override public String getFxmlPath() { return FXML; } }
package com.highstreet.technologies.odl.app.impl.listener;

import com.highstreet.technologies.odl.app.impl.tools.BandwidthCalculator;
import com.highstreet.technologies.odl.app.impl.tools.NeExecutor;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.yang.gen.v1.urn.onf.params.xml.ns.yang.microwave.model.rev170324.*;
import org.opendaylight.yang.gen.v1.urn.onf.params.xml.ns.yang.microwave.model.rev170324.mw.air._interface.pac.AirInterfaceConfiguration;
import org.opendaylight.yang.gen.v1.urn.onf.params.xml.ns.yang.microwave.model.rev170324.mw.air._interface.pac.AirInterfaceStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * Listens for microwave-model notifications; when modulationCur changes on an
 * air interface on the monitored path, recomputes the link's tx capacity and
 * asks the NE to switch paths if capacity drops below the BANDWIDTH threshold.
 */
public class ACMListener implements MicrowaveModelListener {

    private static final Logger LOG = LoggerFactory.getLogger(ACMListener.class);

    /** Loaded once from conf.properties on the classpath; supplies BANDWIDTH. */
    private static Properties properties = null;

    static {
        properties = new Properties();
        // FIX: getResourceAsStream returns null when the resource is missing;
        // previously that caused an uncaught NPE in this static initializer
        // (NPE is not an IOException), and the stream was never closed.
        try (InputStream in =
                ACMListener.class.getClassLoader().getResourceAsStream("conf.properties")) {
            if (in == null) {
                LOG.warn("conf.properties not found on classpath");
            } else {
                properties.load(in);
            }
        } catch (IOException e) {
            LOG.warn("", e);
        }
    }

    private final NeExecutor ne;

    public ACMListener(NeExecutor ne) {
        this.ne = ne;
    }

    /**
     * Handles attribute changes. Sample notification:
     * AttributeValueChangedNotification{getAttributeName=modulationCur, getCounter=8,
     * getNewValue=64, getObjectIdRef=UniversalId [_value=LP-MWPS-AIR-5-1], ...}
     *
     * txCapacity = txChannelBandwidth * log2(modulationCur) * informationRateCur / 1,15
     * (the actual formula lives in BandwidthCalculator).
     */
    @Override
    public void onAttributeValueChangedNotification(
            AttributeValueChangedNotification notification) {
        try {
            if (notification.getAttributeName().equalsIgnoreCase("modulationCur")) {
                String lpId_airInterface = notification.getObjectIdRef().getValue();
                // only react if this LTP belongs to the path we monitor
                if (ne.isLtpOfThisOnPath(lpId_airInterface)) {
                    AirInterfaceConfiguration airInterfaceConfiguration = ne.getUnderAirPac(
                            lpId_airInterface, AirInterfaceConfiguration.class);
                    AirInterfaceStatus airInterfaceStatus = ne.getUnderAirPac(
                            lpId_airInterface, AirInterfaceStatus.class);
                    Double txCapacity = new BandwidthCalculator(
                            airInterfaceConfiguration.getTxChannelBandwidth(),
                            airInterfaceStatus.getModulationCur(),
                            airInterfaceStatus.getCodeRateCur()).calc();
                    // NOTE(review): throws NPE if BANDWIDTH is absent from
                    // conf.properties — consider a default; left as-is.
                    if (txCapacity < Double.valueOf(properties.getProperty("BANDWIDTH"))) {
                        ne.reportSwitch();
                    }
                }
            }
        } catch (ReadFailedException e) {
            LOG.warn("handling attribute change: " + notification + " caught exception!", e);
        }
    }

    /** Not of interest to this listener. */
    @Override
    public void onObjectCreationNotification(ObjectCreationNotification notification) {
    }

    /** Not of interest to this listener. */
    @Override
    public void onObjectDeletionNotification(ObjectDeletionNotification notification) {
    }

    /** Not of interest to this listener. */
    @Override
    public void onProblemNotification(ProblemNotification notification) {
    }
}
package seedu.agendum.ui; import javafx.fxml.FXML; import javafx.scene.Node; import javafx.scene.control.Label; import javafx.scene.layout.HBox; import seedu.agendum.model.task.ReadOnlyTask; public class TaskCard extends UiPart{ private static final String FXML = "TaskListCard.fxml"; @FXML private HBox cardPane; @FXML private Label name; @FXML private Label id; @FXML private Label tags; private ReadOnlyTask task; private int displayedIndex; public TaskCard(){ } public static TaskCard load(ReadOnlyTask task, int displayedIndex){ TaskCard card = new TaskCard(); card.task = task; card.displayedIndex = displayedIndex; return UiPartLoader.loadUiPart(card); } @FXML public void initialize() { name.setText(task.getName().fullName); id.setText(displayedIndex + ". "); tags.setText(task.tagsString()); if (task.isCompleted()) { name.setText(task.getName().fullName + " (done!)"); cardPane.setStyle("-fx-background-color: aquamarine"); } else { cardPane.setStyle("-fx-background-color: salmon"); } } public HBox getLayout() { return cardPane; } @Override public void setNode(Node node) { cardPane = (HBox)node; } @Override public String getFxmlPath() { return FXML; } }
package edu.duke.cabig.c3pr.web.registration; import java.util.Iterator; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import org.springframework.validation.BindException; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.View; import edu.duke.cabig.c3pr.domain.StudySubject; import edu.duke.cabig.c3pr.domain.StudySubjectConsentVersion; import edu.duke.cabig.c3pr.exception.C3PRCodedRuntimeException; import edu.duke.cabig.c3pr.utils.web.ControllerTools; import edu.duke.cabig.c3pr.utils.web.WebUtils; import edu.duke.cabig.c3pr.web.registration.tabs.AssignArmTab; import edu.duke.cabig.c3pr.web.registration.tabs.CompanionRegistrationTab; import edu.duke.cabig.c3pr.web.registration.tabs.EligibilityCriteriaTab; import edu.duke.cabig.c3pr.web.registration.tabs.EnrollmentDetailsTab; import edu.duke.cabig.c3pr.web.registration.tabs.ReviewSubmitTab; import edu.duke.cabig.c3pr.web.registration.tabs.SearchStudySubjectTab; import edu.duke.cabig.c3pr.web.registration.tabs.StratificationTab; import gov.nih.nci.cabig.ctms.web.tabs.Flow; /** * @author Ramakrishna * */ public class CreateRegistrationController<C extends StudySubjectWrapper> extends RegistrationController<C> { /** * Logger for this class */ private static final Logger logger = Logger.getLogger(CreateRegistrationController.class); public CreateRegistrationController() { super("Create Registration"); } @Override protected void intializeFlows(Flow flow) { flow.addTab(new SearchStudySubjectTab()); flow.addTab(new EnrollmentDetailsTab()); flow.addTab(new EligibilityCriteriaTab()); flow.addTab(new StratificationTab()); flow.addTab(new AssignArmTab()); flow.addTab(new CompanionRegistrationTab()); flow.addTab(new ReviewSubmitTab()); setFlow(flow); } @Override protected boolean suppressValidation(HttpServletRequest request, Object command, BindException errors) { if(WebUtils.getPreviousPage(request) == -1){ 
return true; } if (WebUtils.getPreviousPage(request)== 1){ return !WebUtils.hasSubmitParameter(request, "_validateForm"); } return super.suppressValidation(request, command, errors); } @Override protected ModelAndView processFinish(HttpServletRequest request, HttpServletResponse response, Object command, BindException errors) throws Exception { StudySubjectWrapper wrapper = (StudySubjectWrapper) command; StudySubject studySubject = wrapper.getStudySubject(); // remove armNotAvailable request attribute if already present if(request.getAttribute("armNotAvailable")!=null){ request.removeAttribute("armNotAvailable"); } if(wrapper.getShouldReserve()==null){ studySubject=studySubjectRepository.save(studySubject); }else if(wrapper.getShouldReserve()){ studySubject=studySubjectRepository.reserve(studySubject.getIdentifiers()); }else if(wrapper.getShouldRegister()){ studySubject=studySubjectRepository.register(studySubject.getIdentifiers()); }else if(wrapper.getShouldEnroll()){ try { studySubject=studySubjectRepository.enroll(studySubject); } catch (C3PRCodedRuntimeException e) { // Book exhausted message is non-recoverable. It displays an error on the UI if(e.getExceptionCode()==234){ request.setAttribute("armNotAvailable", true); return showPage(request, errors, 6); } // TODO Handle multisite error seperately and elegantly. for now eat the error } } if (logger.isDebugEnabled()) { logger.debug("processFinish(HttpServletRequest, HttpServletResponse, Object, BindException) - registration service call over"); //$NON-NLS-1$ } return new ModelAndView("redirect:confirm?"+ControllerTools.createParameterString(studySubject.getSystemAssignedIdentifiers().get(0))); } }
package seedu.geekeep.model;

import java.util.Set;

import seedu.geekeep.commons.core.UnmodifiableObservableList;
import seedu.geekeep.model.task.ReadOnlyTask;
import seedu.geekeep.model.task.Task;
import seedu.geekeep.model.task.UniqueTaskList;
import seedu.geekeep.model.task.UniqueTaskList.DuplicateTaskException;

/**
 * The API of the in-memory model: task CRUD, done/undone marking, and
 * filtering of the observable task list.
 */
public interface Model {
    /**
     * Adds the given task.
     *
     * @throws UniqueTaskList.DuplicateTaskException if an equivalent task already exists
     */
    void addTask(Task task) throws UniqueTaskList.DuplicateTaskException;

    /**
     * Deletes the task.
     *
     * @throws UniqueTaskList.TaskNotFoundException if the task is not in the list
     */
    void deleteTask(ReadOnlyTask target) throws UniqueTaskList.TaskNotFoundException;

    /** Returns the filtered task list as an {@code UnmodifiableObservableList<ReadOnlyTask>} */
    UnmodifiableObservableList<ReadOnlyTask> getFilteredTaskList();

    /** Returns the task manager */
    ReadOnlyTaskManager getTaskManager();

    /** Clears existing backing model and replaces with the provided new data. */
    void resetData(ReadOnlyTaskManager newData);

    /** Updates the filter of the filtered task list to show all tasks */
    void updateFilteredListToShowAll();

    /** Updates the filter of the filtered task list to filter by the given keywords */
    void updateFilteredTaskList(Set<String> keywords);

    /** Updates the filter of the filtered task list to show only tasks marked done */
    void updateFilteredTaskListToShowDone();

    /** Updates the filter of the filtered task list to show only tasks not marked done */
    void updateFilteredTaskListToShowUndone();

    /**
     * Updates the task located at {@code filteredTaskListIndex} with {@code editedTask}.
     *
     * @throws DuplicateTaskException
     *             if updating the task's details causes the task to be equivalent to another existing task in the
     *             list.
     * @throws IndexOutOfBoundsException
     *             if {@code filteredTaskListIndex} < 0 or >= the size of the filtered list.
     */
    void updateTask(int filteredTaskListIndex, ReadOnlyTask editedTask)
            throws UniqueTaskList.DuplicateTaskException;

    /** Mark the specified task as done */
    void markTaskDone(int filteredTaskListIndex);

    /** Mark the specified task as undone */
    void markTaskUndone(int filteredTaskListIndex);
}
package techreborn.init; import net.minecraft.block.material.Material; import net.minecraftforge.fluids.Fluid; import net.minecraftforge.fluids.FluidRegistry; import net.minecraftforge.fml.common.registry.GameRegistry; import reborncore.api.fuel.FluidPowerManager; import techreborn.blocks.fluid.BlockFluidBase; import techreborn.blocks.fluid.BlockFluidTechReborn; import techreborn.blocks.fluid.TechRebornFluid; import techreborn.lib.ModInfo; public class ModFluids { public static Fluid fluidberylium = new TechRebornFluid("fluidberylium"); public static BlockFluidBase BlockFluidBerylium; public static Fluid fluidcalcium = new TechRebornFluid("fluidcalcium"); public static BlockFluidBase BlockFluidCalcium; public static Fluid fluidcalciumcarbonate = new TechRebornFluid("fluidcalciumcarbonate"); public static BlockFluidBase BlockFluidCalciumCarbonate; public static Fluid fluidChlorite = new TechRebornFluid("fluidchlorite"); public static BlockFluidBase BlockFluidChlorite; public static Fluid fluidDeuterium = new TechRebornFluid("fluiddeuterium"); public static BlockFluidBase BlockFluidDeuterium; public static Fluid fluidGlyceryl = new TechRebornFluid("fluidglyceryl"); public static BlockFluidBase BlockFluidGlyceryl; public static Fluid fluidHelium = new TechRebornFluid("fluidhelium"); public static BlockFluidBase BlockFluidHelium; public static Fluid fluidHelium3 = new TechRebornFluid("fluidhelium3"); public static BlockFluidBase BlockFluidHelium3; public static Fluid fluidHeliumplasma = new TechRebornFluid("fluidheliumplasma"); public static BlockFluidBase BlockFluidHeliumplasma; public static Fluid fluidHydrogen = new TechRebornFluid("fluidhydrogen"); public static BlockFluidBase BlockFluidHydrogen; public static Fluid fluidLithium = new TechRebornFluid("fluidlithium"); public static BlockFluidBase BlockFluidLithium; public static Fluid fluidMercury = new TechRebornFluid("fluidmercury"); public static BlockFluidBase BlockFluidMercury; public static Fluid fluidMethane 
= new TechRebornFluid("fluidmethane");
public static BlockFluidBase BlockFluidMethane;
public static Fluid fluidNitrocoalfuel = new TechRebornFluid("fluidnitrocoalfuel");
public static BlockFluidBase BlockFluidNitrocoalfuel;
public static Fluid fluidNitrofuel = new TechRebornFluid("fluidnitrofuel");
public static BlockFluidBase BlockFluidNitrofuel;
public static Fluid fluidNitrogen = new TechRebornFluid("fluidnitrogen");
public static BlockFluidBase BlockFluidNitrogen;
public static Fluid fluidNitrogendioxide = new TechRebornFluid("fluidnitrogendioxide");
public static BlockFluidBase BlockFluidNitrogendioxide;
public static Fluid fluidPotassium = new TechRebornFluid("fluidpotassium");
public static BlockFluidBase BlockFluidPotassium;
public static Fluid fluidSilicon = new TechRebornFluid("fluidsilicon");
public static BlockFluidBase BlockFluidSilicon;
public static Fluid fluidSodium = new TechRebornFluid("fluidsodium");
public static BlockFluidBase BlockFluidSodium;
public static Fluid fluidSodiumpersulfate = new TechRebornFluid("fluidsodiumpersulfate");
public static BlockFluidBase BlockFluidSodiumpersulfate;
public static Fluid fluidTritium = new TechRebornFluid("fluidtritium");
public static BlockFluidBase BlockFluidTritium;
public static Fluid fluidWolframium = new TechRebornFluid("fluidwolframium");
public static BlockFluidBase BlockFluidWolframium;

/**
 * Registers every fluid with the Forge {@code FluidRegistry}, creates its
 * water-material fluid block, and registers that block with
 * {@code GameRegistry} under the name
 * {@code MOD_ID + "_" + unlocalizedName.substring(5)} (strips the "tile."
 * prefix). Registration order matches the original hand-unrolled sequence.
 * Nitrocoalfuel additionally gets a power value of 48.0 EU registered with
 * {@code FluidPowerManager}.
 */
public static void init() {
	BlockFluidBerylium = registerFluidAndBlock(fluidberylium, "techreborn.berylium");
	BlockFluidCalcium = registerFluidAndBlock(fluidcalcium, "techreborn.calcium");
	BlockFluidCalciumCarbonate = registerFluidAndBlock(fluidcalciumcarbonate, "techreborn.calciumcarbonate");
	BlockFluidChlorite = registerFluidAndBlock(fluidChlorite, "techreborn.chlorite");
	BlockFluidDeuterium = registerFluidAndBlock(fluidDeuterium, "techreborn.deuterium");
	BlockFluidGlyceryl = registerFluidAndBlock(fluidGlyceryl, "techreborn.glyceryl");
	BlockFluidHelium = registerFluidAndBlock(fluidHelium, "techreborn.helium");
	BlockFluidHelium3 = registerFluidAndBlock(fluidHelium3, "techreborn.helium3");
	BlockFluidHeliumplasma = registerFluidAndBlock(fluidHeliumplasma, "techreborn.heliumplasma");
	BlockFluidHydrogen = registerFluidAndBlock(fluidHydrogen, "techreborn.hydrogen");
	BlockFluidLithium = registerFluidAndBlock(fluidLithium, "techreborn.lithium");
	BlockFluidMercury = registerFluidAndBlock(fluidMercury, "techreborn.mercury");
	BlockFluidMethane = registerFluidAndBlock(fluidMethane, "techreborn.methane");
	BlockFluidNitrocoalfuel = registerFluidAndBlock(fluidNitrocoalfuel, "techreborn.nitrocoalfuel");
	// Nitrocoalfuel is the only fluid with a registered burn/power value.
	FluidPowerManager.fluidPowerValues.put(fluidNitrocoalfuel, 48.0);
	BlockFluidNitrofuel = registerFluidAndBlock(fluidNitrofuel, "techreborn.nitrofuel");
	BlockFluidNitrogen = registerFluidAndBlock(fluidNitrogen, "techreborn.nitrogen");
	BlockFluidNitrogendioxide = registerFluidAndBlock(fluidNitrogendioxide, "techreborn.nitrogendioxide");
	BlockFluidPotassium = registerFluidAndBlock(fluidPotassium, "techreborn.potassium");
	BlockFluidSilicon = registerFluidAndBlock(fluidSilicon, "techreborn.silicon");
	BlockFluidSodium = registerFluidAndBlock(fluidSodium, "techreborn.sodium");
	BlockFluidSodiumpersulfate = registerFluidAndBlock(fluidSodiumpersulfate, "techreborn.sodiumpersulfate");
	BlockFluidTritium = registerFluidAndBlock(fluidTritium, "techreborn.tritium");
	BlockFluidWolframium = registerFluidAndBlock(fluidWolframium, "techreborn.wolframium");
}

/**
 * One registration step, factored out of the formerly ~23x-duplicated body of
 * {@link #init()}: registers {@code fluid}, wraps it in a
 * {@code BlockFluidTechReborn} with {@code Material.WATER}, registers that
 * block, and returns it so the caller can store it in the matching static
 * field.
 *
 * @param fluid           the already-constructed fluid to register
 * @param unlocalizedName unlocalized name for the fluid block (e.g.
 *                        "techreborn.methane")
 * @return the newly created and registered fluid block
 */
private static BlockFluidBase registerFluidAndBlock(Fluid fluid, String unlocalizedName) {
	FluidRegistry.registerFluid(fluid);
	BlockFluidBase block = new BlockFluidTechReborn(fluid, Material.WATER, unlocalizedName);
	GameRegistry.registerBlock(block, ModInfo.MOD_ID + "_" + block.getUnlocalizedName().substring(5));
	return block;
}
}
package water.parser;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import water.*;
import water.fvec.ParseDataset2.ParseProgressMonitor;

/**
 * Base class for all dataset parsers (CSV, SVMLight, XLS, ...).
 * A parser pulls raw bytes through {@link DataIn} and emits typed cells into
 * a {@link DataOut} sink. Parsers that support parallel parsing implement
 * {@link #parallelParse}; the {@code streamParse} variants drive it
 * sequentially over an {@link InputStream}.
 */
public abstract class CustomParser extends Iced {
  // Byte-level character constants used by the concrete tokenizers.
  public static final byte CHAR_TAB = '\t';
  public static final byte CHAR_LF = 10;
  public static final byte CHAR_SPACE = ' ';
  public static final byte CHAR_CR = 13;
  public static final byte CHAR_VT = 11;
  public static final byte CHAR_FF = 12;
  public static final byte CHAR_DOUBLE_QUOTE = '"';
  public static final byte CHAR_SINGLE_QUOTE = '\'';
  public static final byte CHAR_NULL = 0;
  public static final byte CHAR_COMMA = ',';
  // Caps for the preview grid produced during setup-guessing.
  public final static int MAX_PREVIEW_COLS = 100;
  public final static int MAX_PREVIEW_LINES = 50;
  /** Immutable parse configuration this parser instance was built with. */
  public final ParserSetup _setup;
  public CustomParser(ParserSetup setup){_setup = setup;}

  /**
   * Result of a setup-guessing pass: the guessed {@link ParserSetup} plus
   * valid/invalid line counts, any errors, a preview of the data, and the
   * keys the setup/header were derived from.
   */
  public static class PSetupGuess extends Iced {
    public final ParserSetup _setup;
    public final int _invalidLines;
    public final int _validLines;
    public final String [] _errors;
    public Key _setupFromFile;  // key of the file this setup was guessed from
    public Key _hdrFromFile;    // key of the file the header row came from
    public String [][] _data;   // preview rows (row 0 may be the header)
    public PSetupGuess(ParserSetup ps, int vlines, int ilines, String [][] data, String [] errors){
      _setup = ps;
      _invalidLines = ilines;
      _validLines = vlines;
      _errors = errors;
      _data = data;
    }
    /** A guess is usable if it found columns, parsed at least one line, and valid lines outnumber invalid ones. */
    public final boolean valid(){
      return _setup._ncols > 0 && _validLines > 0 && _invalidLines < _validLines;
    }
    public final boolean hasErrors(){
      return _errors != null && _errors.length > 0;
    }
    public String toString(){
      if(!valid())
        return "Parser setup appears to be broken, got " + _setup.toString();
      else if(hasErrors())
        return "Parser setup appears to work with some errors, got " + _setup.toString();
      else
        return "Parser setup working fine, got " + _setup.toString();
    }
  }

  /** Supported parser kinds; the flag records whether the format can be parsed in parallel chunks. */
  public enum ParserType {
    AUTO(false),XLS(false),XLSX(false),CSV(true), SVMLight(true);
    public final boolean parallelParseSupported;
    ParserType(boolean par){parallelParseSupported = par;}
  }

  /**
   * Parse configuration: format, separator byte, header flag, quoting mode,
   * column names and column count. Mostly immutable; header/column-name
   * fields are mutated during setup merging.
   */
  public static class ParserSetup extends Iced implements Cloneable{
    public final ParserType _pType;
    public final byte _separator;
    public boolean _header;
    public boolean _singleQuotes;
    public String [] _columnNames;
    public final int _ncols;
    /** Default: AUTO-detect format, auto separator, no header, zero columns. */
    public ParserSetup() {
      _pType = ParserType.AUTO;
      _separator = CsvParser.AUTO_SEP;
      _header = false;
      _ncols = 0;
      _columnNames = null;
    }
    protected ParserSetup(ParserType t) {
      this(t,CsvParser.AUTO_SEP,0,false,null,false);
    }
    public ParserSetup(ParserType t, byte sep, boolean header) {
      _pType = t;
      _separator = sep;
      _header = header;
      _columnNames = null;
      _ncols = 0;
    }
    public ParserSetup(ParserType t, byte sep, int ncolumns, boolean header, String [] columnNames, boolean singleQuotes) {
      _pType = t;
      _separator = sep;
      _ncols = ncolumns;
      _header = header;
      _columnNames = columnNames;
      _singleQuotes = singleQuotes;
    }
    /** Copy; NOTE: deliberately drops {@code _columnNames} (passes null). */
    public ParserSetup clone(){
      return new ParserSetup(_pType, _separator, _ncols,_header,null,_singleQuotes);
    }
    /** Two setups are compatible when same format and, for CSV, same separator and column count. */
    public boolean isCompatible(ParserSetup other){
      if(other == null || _pType != other._pType)return false;
      if(_pType == ParserType.CSV && (_separator != other._separator || _ncols != other._ncols)) return false;
      return true;
    }
    /** Factory: build the concrete parser for this setup (AUTO/XLSX unsupported here). */
    public CustomParser parser(){
      switch(this._pType){
        case CSV: return new CsvParser(this);
        case SVMLight: return new SVMLightParser(this);
        case XLS: return new XlsParser(this);
        default: throw H2O.unimpl();
      }
    }
    public String toString(){
      StringBuilder sb = new StringBuilder(_pType.name());
      switch(_pType){
        case SVMLight: sb.append(" data with (estimated) " + _ncols + " columns."); break;
        // NOTE(review): the "(\\" + _separator + "04d)" fragment looks like a
        // mangled attempt at printing the separator's numeric code (e.g. a
        // broken %04d format) — confirm intended output before changing.
        case CSV: sb.append(" data with " + _ncols + " columns using '" + (char)_separator + "' (\\" + _separator + "04d) as separator."); break;
        case XLS: sb.append(" data with " + _ncols + " columns."); break;
        case AUTO: sb.append(""); break;
        default: throw H2O.unimpl();
      }
      return sb.toString();
    }
  }

  /** True when this parser's setup is (or is compatible with) the other parser's setup. */
  public boolean isCompatible(CustomParser p){return _setup == p._setup || (_setup != null && _setup.isCompatible(p._setup));}
  /** Parse one chunk; overridden by parsers whose format supports chunked parallel parsing. */
  public DataOut parallelParse(int cidx, final DataIn din, final DataOut dout) {throw new UnsupportedOperationException();}
  public boolean parallelParseSupported(){return false;}

  /**
   * Sequentially drive {@link #parallelParse} over the whole stream,
   * one chunk index at a time, into a single sink.
   */
  public DataOut streamParse( final InputStream is, final DataOut dout) throws Exception {
    if(_setup._pType.parallelParseSupported){
      StreamData din = new StreamData(is);
      int cidx=0;
      while( is.available() > 0 )
        parallelParse(cidx++,din,dout);
      parallelParse(cidx++,din,dout); // Parse the remaining partial 32K buffer
    } else {
      throw H2O.unimpl();
    }
    return dout;
  }

  // Zipped file; no parallel decompression; decompress into local chunks,
  // parse local chunks; distribute chunks later.
  public DataOut streamParse( final InputStream is, final StreamDataOut dout, ParseProgressMonitor pmon) throws IOException {
    // All output into a fresh pile of NewChunks, one per column
    if(_setup._pType.parallelParseSupported){
      StreamData din = new StreamData(is);
      int cidx=0;
      StreamDataOut nextChunk = dout;
      long lastProgress = pmon.progress();
      while( is.available() > 0 ){
        // When the progress monitor advances, roll over to a new output chunk
        // and fold the finished one back into the main sink.
        if(pmon.progress() > lastProgress){
          lastProgress = pmon.progress();
          nextChunk.close();
          if(dout != nextChunk)dout.reduce(nextChunk);
          nextChunk = nextChunk.nextChunk();
        }
        parallelParse(cidx++,din,nextChunk);
      }
      parallelParse(cidx++,din,nextChunk); // Parse the remaining partial 32K buffer
      nextChunk.close();
      if(dout != nextChunk)dout.reduce(nextChunk);
    } else {
      throw H2O.unimpl();
    }
    return dout;
  }

  protected static final boolean isWhitespace(byte c) { return (c == CHAR_SPACE) || (c == CHAR_TAB); }
  protected static final boolean isEOL(byte c) { return ((c == CHAR_LF) || (c == CHAR_CR)); }

  /** Byte-supplier side of a parse: hands out raw data one chunk index at a time. */
  public interface DataIn {
    // Get another chunk of byte data
    public abstract byte[] getChunkData( int cidx );
  }

  /** Cell-consumer side of a parse: receives column names, rows, and typed cell values. */
  public interface DataOut extends Freezable {
    public void setColumnNames(String [] names);
    // Register a newLine from the parser
    public void newLine();
    // True if already forced into a string column (skip number parsing)
    public boolean isString(int colIdx);
    // Add a number column with given digits & exp
    public void addNumCol(int colIdx, long number, int exp);
    // Add a number column with given digits & exp
    public void addNumCol(int colIdx, double d);
    // An an invalid / missing entry
    public void addInvalidCol(int colIdx);
    // Add a String column
    public void addStrCol( int colIdx, ValueString str );
    // Final rolling back of partial line
    public void rollbackLine();
    public void invalidLine(String err);
    public void invalidValue(int line, int col);
  }

  /** A DataOut that can be closed, chained to a fresh successor, and merged. */
  public interface StreamDataOut extends DataOut {
    public StreamDataOut nextChunk();
    public StreamDataOut reduce(StreamDataOut dout);
    public StreamDataOut close();
    public StreamDataOut close(Futures fs);
  }

  /**
   * DataIn over an InputStream using two alternating 64K buffers, so the
   * current chunk and its predecessor stay addressable by chunk index.
   */
  public static class StreamData implements CustomParser.DataIn {
    final transient InputStream _is;
    private byte[] _bits0 = new byte[64*1024];
    private byte[] _bits1 = new byte[64*1024];
    private int _cidx0=-1, _cidx1=-1; // Chunk
    public StreamData(InputStream is){_is = is;}
    @Override public byte[] getChunkData(int cidx) {
      // Re-serve a chunk we already hold.
      if(cidx == _cidx0)return _bits0;
      if(cidx == _cidx1)return _bits1;
      // Otherwise only the next chunk may be requested; recycle the older buffer.
      assert cidx==_cidx0+1 || cidx==_cidx1+1;
      byte[] bits = _cidx0<_cidx1 ? _bits0 : _bits1;
      if( _cidx0<_cidx1 ) _cidx0 = cidx;
      else _cidx1 = cidx;
      // Read as much as the buffer will hold
      int off=0;
      try {
        while( off < bits.length ) {
          int len = _is.read(bits,off,bits.length-off);
          if( len == -1 ) break;
          off += len;
        }
        assert off == bits.length || _is.available() <= 0;
      } catch( IOException ioe ) {
        // _parserr = ioe.toString();  -- old error-capture path, disabled
        throw new RuntimeException(ioe);
      }
      if( off == bits.length ) return bits;
      // Final read is short; cache the short-read
      byte[] bits2 = (off == 0) ? null : Arrays.copyOf(bits,off);
      if( _cidx0==cidx ) _bits0 = bits2;
      else _bits1 = bits2;
      return bits2;
    }
  }

  public abstract CustomParser clone();
  /** Column names, if this parser knows them; null by default. */
  public String [] headers(){return null;}

  /**
   * DataOut used only for preview/inspection during setup guessing: records
   * up to MAX_PREVIEW_LINES x MAX_PREVIEW_COLS string cells plus counters.
   */
  protected static class CustomInspectDataOut extends Iced implements DataOut {
    public int _nlines;
    public int _ncols;
    public int _invalidLines;
    public boolean _header;
    private String [] _colNames;
    private String [][] _data = new String[MAX_PREVIEW_LINES][MAX_PREVIEW_COLS];
    // NOTE(review): _errors is never initialized here; invalidLine() would NPE
    // unless a subclass/caller assigns it first — confirm.
    transient ArrayList<String> _errors;
    public CustomInspectDataOut() {
      // Pre-fill the preview grid with "NA" so sparse rows render sensibly.
      for(int i = 0; i < MAX_PREVIEW_LINES;++i)
        Arrays.fill(_data[i],"NA");
    }
    /** Trim the preview grid to the observed row/column counts (also replaces _data). */
    public String [][] data(){
      String [][] res = Arrays.copyOf(_data, Math.min(MAX_PREVIEW_LINES, _nlines));
      for(int i = 0; i < res.length; ++i)
        res[i] = Arrays.copyOf(_data[i], Math.min(MAX_PREVIEW_COLS,_ncols));
      return (_data = res);
    }
    @Override public void setColumnNames(String[] names) {
      _colNames = names;
      _data[0] = names;  // header occupies preview row 0
      ++_nlines;
      _ncols = names.length;
      _header = true;
    }
    @Override public void newLine() { ++_nlines; }
    @Override public boolean isString(int colIdx) {return false;}
    @Override public void addNumCol(int colIdx, long number, int exp) {
      if(colIdx < _ncols && _nlines < MAX_PREVIEW_LINES)
        _data[_nlines][colIdx] = Double.toString(number*DParseTask.pow10(exp));
    }
    @Override public void addNumCol(int colIdx, double d) {
      _ncols = Math.max(_ncols,colIdx);
      if(_nlines < MAX_PREVIEW_LINES && colIdx < MAX_PREVIEW_COLS)
        _data[_nlines][colIdx] = Double.toString(d);
    }
    @Override public void addInvalidCol(int colIdx) {
      if(colIdx < _ncols && _nlines < MAX_PREVIEW_LINES)
        _data[_nlines][colIdx] = "NA";
    }
    @Override public void addStrCol(int colIdx, ValueString str) {
      if(colIdx < _ncols && _nlines < MAX_PREVIEW_LINES)
        _data[_nlines][colIdx] = str.toString();
    }
    @Override public void rollbackLine() {--_nlines;}
    @Override public void invalidLine(String err) {
      ++_invalidLines;
      _errors.add("Error at line: " + _nlines + ", reason: " + err);
    }
    @Override public void invalidValue(int linenum, int colnum) {}
  }
}
package water.parser; import java.io.EOFException; import java.io.InputStream; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.zip.*; import jsr166y.CountedCompleter; import water.*; import water.H2O.H2OCountedCompleter; import water.fvec.Frame; import water.parser.CustomParser.PSetupGuess; import water.parser.CustomParser.ParserSetup; import water.parser.CustomParser.ParserType; import water.parser.DParseTask.Pass; import water.util.*; import water.util.Utils.IcedArrayList; import com.google.common.base.Throwables; import com.google.common.io.Closeables; /** * Helper class to parse an entire ValueArray data, and produce a structured ValueArray result. * * @author <a href="mailto:cliffc@0xdata.com"></a> */ @SuppressWarnings("fallthrough") public final class ParseDataset extends Job { public static enum Compression { NONE, ZIP, GZIP } public static int PLIMIT = Integer.MAX_VALUE; public final Key _progress; private ParseDataset(Key dest, Key[] keys) { destination_key = dest; Value dataset = DKV.get(keys[0]); long total = dataset.length() * Pass.values().length; for(int i = 1; i < keys.length; ++i){ dataset = DKV.get(keys[i]); total += dataset.length() * Pass.values().length; } _progress = Key.make(UUID.randomUUID().toString(), (byte) 0, Key.JOB); UKV.put(_progress, new Progress(0,total)); } public static PSetupGuess guessSetup(byte [] bits){ return guessSetup(bits,new ParserSetup(),true); } public static class GuessSetupTsk extends MRTask<GuessSetupTsk> { final CustomParser.ParserSetup _userSetup; final boolean _checkHeader; PSetupGuess _gSetup; IcedArrayList<Key> _failedSetup; IcedArrayList<Key> _conflicts; public GuessSetupTsk(CustomParser.ParserSetup userSetup, boolean checkHeader){ _userSetup = userSetup; assert _userSetup != null; _checkHeader = checkHeader; assert !_userSetup._header || !checkHeader; } public static final int MAX_ERRORS = 64; @Override public void map(Key key) { _failedSetup = new IcedArrayList<Key>(); 
_conflicts = new IcedArrayList<Key>(); byte [] bits = Utils.getFirstUnzipedBytes(key); _gSetup = ParseDataset.guessSetup(bits, _userSetup, _checkHeader); if(_gSetup == null || !_gSetup.valid()) _failedSetup.add(key); else { _gSetup._setupFromFile = key; if(_checkHeader && _gSetup._setup._header) _gSetup._hdrFromFile = key; } } @Override public void reduce(GuessSetupTsk drt) { if(_gSetup == null || !_gSetup.valid()){ _gSetup = drt._gSetup; _gSetup._hdrFromFile = drt._gSetup._hdrFromFile; _gSetup._setupFromFile = drt._gSetup._setupFromFile; } else if(drt._gSetup.valid() && !_gSetup._setup.isCompatible(drt._gSetup._setup) ){ if(_conflicts.contains(_gSetup._setupFromFile) && !drt._conflicts.contains(drt._gSetup._setupFromFile)){ _gSetup = drt._gSetup; // setups are not compatible, select random setup to send up (thus, the most common setup should make it to the top) _gSetup._setupFromFile = drt._gSetup._setupFromFile; _gSetup._hdrFromFile = drt._gSetup._hdrFromFile; } else if(!drt._conflicts.contains(drt._gSetup._setupFromFile)) { _conflicts.add(_gSetup._setupFromFile); _conflicts.add(drt._gSetup._setupFromFile); } } else if(drt._gSetup.valid()){ // merge the two setups if(!_gSetup._setup._header && drt._gSetup._setup._header){ _gSetup._setup._header = true; _gSetup._hdrFromFile = drt._gSetup._hdrFromFile; _gSetup._setup._columnNames = drt._gSetup._setup._columnNames; } if(_gSetup._data.length < CustomParser.MAX_PREVIEW_LINES){ int n = _gSetup._data.length; int m = Math.min(CustomParser.MAX_PREVIEW_LINES, n + drt._gSetup._data.length-1); _gSetup._data = Arrays.copyOf(_gSetup._data, m); for(int i = n; i < m; ++i){ _gSetup._data[i] = drt._gSetup._data[i-n+1]; } } } // merge failures if(_failedSetup == null){ _failedSetup = drt._failedSetup; _conflicts = drt._conflicts; } else { _failedSetup.addAll(drt._failedSetup); _conflicts.addAll(drt._conflicts); } } } public static class ParseSetupGuessException extends RuntimeException { public final PSetupGuess _gSetup; public 
final Key [] _failed; public ParseSetupGuessException(String msg, PSetupGuess gSetup, Key [] failed){ super(msg); _gSetup = gSetup; _failed = failed; } } public static CustomParser.PSetupGuess guessSetup(ArrayList<Key> keys,Key headerKey, CustomParser.ParserSetup setup, boolean checkHeader) { String [] colNames = null; CustomParser.PSetupGuess gSetup = null; boolean headerKeyPartOfParse = false; if(headerKey != null ){ if(keys.contains(headerKey)){ headerKeyPartOfParse = true; keys.remove(headerKey); // process the header key separately } } if(keys.size() > 1){ GuessSetupTsk t = new GuessSetupTsk(setup,checkHeader); Key [] ks = new Key[keys.size()]; keys.toArray(ks); t.invoke(ks); gSetup = t._gSetup; if(gSetup.valid() && (!t._failedSetup.isEmpty() || !t._conflicts.isEmpty())){ // run guess setup once more, this time knowing the global setup to get rid of conflicts (turns them into failures) and bogus failures (i.e. single line files with unexpected separator) GuessSetupTsk t2 = new GuessSetupTsk(gSetup._setup, !gSetup._setup._header); HashSet<Key> keySet = new HashSet<Key>(t._conflicts); keySet.addAll(t._failedSetup); Key [] keys2 = new Key[keySet.size()]; t2.invoke(keySet.toArray(keys2)); t._failedSetup = t2._failedSetup; t._conflicts = t2._conflicts; if(!gSetup._setup._header && t2._gSetup._setup._header){ gSetup._setup._header = true; gSetup._setup._columnNames = t2._gSetup._setup._columnNames; t._gSetup._hdrFromFile = t2._gSetup._hdrFromFile; } } assert t._conflicts.isEmpty(); // we should not have any conflicts here, either we failed to find any valid global setup, or conflicts should've been converted into failures in the second pass if(!t._failedSetup.isEmpty()){ Key [] fks = new Key[t._failedSetup.size()]; throw new ParseSetupGuessException("Can not parse: Got incompatible files.", gSetup, t._failedSetup.toArray(fks)); } } else if(!keys.isEmpty()) gSetup = ParseDataset.guessSetup(Utils.getFirstUnzipedBytes(keys.get(0)),setup,checkHeader); 
if(!gSetup.valid()) throw new ParseSetupGuessException("",gSetup,null); if(headerKey != null){ // separate headerKey Value v = DKV.get(headerKey); if(!v.isRawData()){ // either ValueArray or a Frame, just extract the headers if(v.isArray()){ ValueArray ary = v.get(); colNames = ary.colNames(); } else if(v.isFrame()){ Frame fr = v.get(); colNames = fr._names; } else throw new ParseSetupGuessException("Headers can only come from unparsed data, ValueArray or a frame. Got " + v.newInstance().getClass().getSimpleName(),gSetup,null); } else { // check the hdr setup by parsing first bytes CustomParser.ParserSetup lSetup = gSetup._setup.clone(); lSetup._header = true; PSetupGuess hSetup = ParseDataset.guessSetup(Utils.getFirstUnzipedBytes(headerKey),lSetup,false); if(hSetup == null || !hSetup.valid()) { // no match with global setup, try once more with general setup (e.g. header file can have different separator than the rest) ParserSetup stp = new ParserSetup(); stp._header = true; hSetup = ParseDataset.guessSetup(Utils.getFirstUnzipedBytes(headerKey),stp,false); } if(!hSetup.valid() || hSetup._setup._columnNames == null) throw new ParseSetupGuessException("Invalid header file. I did not find any column names.",gSetup,null); if(hSetup._setup._ncols != gSetup._setup._ncols) throw new ParseSetupGuessException("Header file has different number of columns than the rest!, expected " + gSetup._setup._ncols + " columns, got " + hSetup._setup._ncols + ", header: " + Arrays.toString(hSetup._setup._columnNames),gSetup,null); if(hSetup._data != null && hSetup._data.length > 1){// the hdr file had both hdr and data, it better be part of the parse and represent the global parser setup if(!headerKeyPartOfParse) throw new ParseSetupGuessException(headerKey + " can not be used as a header file. Please either parse it separately first or include the file in the parse. 
Raw (unparsed) files can only be used as headers if they are included in the parse or they contain ONLY the header and NO DATA.",gSetup,null); else if(gSetup._setup.isCompatible(hSetup._setup)){ gSetup = hSetup; keys.add(headerKey); // put the key back so the file is parsed! }else throw new ParseSetupGuessException("Header file is not compatible with the other files.",gSetup, null); } else if(hSetup != null && hSetup._setup._columnNames != null) colNames = hSetup._setup._columnNames; else throw new ParseSetupGuessException("Invalid header file. I did not find any column names.",gSetup,null); } } // now set the header info in the final setup if(colNames != null){ gSetup._setup._header = true; gSetup._setup._columnNames = colNames; gSetup._hdrFromFile = headerKey; } return gSetup; } public static PSetupGuess guessSetup(byte [] bits, ParserSetup setup, boolean checkHeader){ if(bits == null)return new PSetupGuess(new ParserSetup(), 0, 0, null, null); ArrayList<PSetupGuess> guesses = new ArrayList<CustomParser.PSetupGuess>(); PSetupGuess res = null; if(setup == null)setup = new ParserSetup(); switch(setup._pType){ case CSV: return CsvParser.guessSetup(bits,setup,checkHeader); case SVMLight: return SVMLightParser.guessSetup(bits); case XLS: return XlsParser.guessSetup(bits); case AUTO: try{ if((res = XlsParser.guessSetup(bits)) != null && res.valid()) if(!res.hasErrors())return res; else guesses.add(res); }catch(Exception e){} try{ if((res = SVMLightParser.guessSetup(bits)) != null && res.valid()) if(!res.hasErrors())return res; else guesses.add(res); }catch(Exception e){} try{ if((res = CsvParser.guessSetup(bits,setup,checkHeader)) != null && res.valid()) if(!res.hasErrors())return res; else guesses.add(res); }catch(Exception e){e.printStackTrace();} if(res == null || !res.valid() && !guesses.isEmpty()){ for(PSetupGuess pg:guesses) if(res == null || pg._validLines > res._validLines) res = pg; } assert res != null; return res; default: throw H2O.unimpl(); } } public 
static void parse(Key okey, Key [] keys){ forkParseDataset(okey, keys, null).get(); } static DParseTask tryParseXls(Value v,ParseDataset job){ DParseTask t = new DParseTask().createPassOne(v, job, new XlsParser(null)); try{t.passOne();} catch(Exception e) {return null;} return t; } public static void parse(ParseDataset job, Key [] keys, CustomParser.ParserSetup setup) { if(setup == null){ ArrayList<Key> ks = new ArrayList<Key>(keys.length); for (Key k:keys)ks.add(k); PSetupGuess guess = guessSetup(ks, null, new ParserSetup(), true); if(!guess.valid())throw new RuntimeException("can not parse this dataset, did not find working setup"); setup = guess._setup; } int j = 0; UKV.remove(job.dest());// remove any previous instance and insert a sentinel (to ensure no one has been writing to the same keys during our parse! Key [] nonEmptyKeys = new Key[keys.length]; for (int i = 0; i < keys.length; ++i) { Value v = DKV.get(keys[i]); if (v == null || v.length() > 0) // skip nonzeros nonEmptyKeys[j++] = keys[i]; } if (j < nonEmptyKeys.length) // remove the nulls keys = Arrays.copyOf(nonEmptyKeys, j); if (keys.length == 0) { job.cancel(); return; } if(setup == null || setup._pType == ParserType.XLS){ DParseTask p1 = tryParseXls(DKV.get(keys[0]),job); if(p1 != null) { if(keys.length == 1){ // shortcut for 1 xls file, we already have pass one done, just do the 2nd pass and we're done DParseTask p2 = p1.createPassTwo(); p2.passTwo(); p2.createValueArrayHeader(); job.remove(); return; } else throw H2O.unimpl(); } } UnzipAndParseTask tsk = new UnzipAndParseTask(job, setup); tsk.invoke(keys); DParseTask [] p2s = new DParseTask[keys.length]; DParseTask phaseTwo = tsk._tsk.createPassTwo(); // too keep original order of the keys... 
HashMap<Key, FileInfo> fileInfo = new HashMap<Key, FileInfo>(); long rowCount = 0; for(int i = 0; i < tsk._fileInfo.length; ++i) fileInfo.put(tsk._fileInfo[i]._ikey,tsk._fileInfo[i]); // run pass 2 for(int i = 0; i < keys.length; ++i){ FileInfo finfo = fileInfo.get(keys[i]); Key k = finfo._okey; long nrows = finfo._nrows[finfo._nrows.length-1]; for(j = 0; j < finfo._nrows.length; ++j) finfo._nrows[j] += rowCount; rowCount += nrows; p2s[i] = phaseTwo.makePhase2Clone(finfo).dfork(k); } phaseTwo._sigma = new double[phaseTwo._ncolumns]; phaseTwo._invalidValues = new long[phaseTwo._ncolumns]; // now put the results together and create ValueArray header for(int i = 0; i < p2s.length; ++i){ DParseTask t = p2s[i]; try{ p2s[i].get(); }catch(Exception e){throw new RuntimeException(e);} Utils.add(phaseTwo._sigma,t._sigma); Utils.add(phaseTwo._invalidValues,t._invalidValues); if ((t._error != null) && !t._error.isEmpty()) { System.err.println(phaseTwo._error); throw new RuntimeException("The dataset format is not recognized/supported"); } FileInfo finfo = fileInfo.get(keys[i]); UKV.remove(finfo._okey); } phaseTwo.normalizeSigma(); phaseTwo._colNames = setup._columnNames; if(setup._header) phaseTwo.setColumnNames(setup._columnNames); phaseTwo.createValueArrayHeader(); } public static class ParserFJTask extends H2OCountedCompleter { final ParseDataset job; Key [] keys; CustomParser.ParserSetup setup; public ParserFJTask(ParseDataset job, Key [] keys, CustomParser.ParserSetup setup){ this.job = job; this.keys = keys; this.setup = setup; } @Override public void compute2() { parse(job, keys,setup); tryComplete(); } @Override public void onCompletion(CountedCompleter cmp){job.remove();} @Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){ job.cancel("Got Exception " + ex.getClass().getSimpleName() + ", with msg " + ex.getMessage()); return super.onExceptionalCompletion(ex, caller); } } public static Job forkParseDataset(final Key dest, final Key[] 
keys, final CustomParser.ParserSetup setup) { ParseDataset job = new ParseDataset(dest, keys); H2OCountedCompleter fjt = new ParserFJTask(job, keys, setup); job.start(fjt); H2O.submitTask(fjt); return job; } public static class ParseException extends RuntimeException { public ParseException(String msg) { super(msg); } } public static class FileInfo extends Iced{ Key _ikey; Key _okey; long [] _nrows; boolean _header; } public static class UnzipAndParseTask extends DRemoteTask { final ParseDataset _job; DParseTask _tsk; FileInfo [] _fileInfo; CustomParser.ParserSetup _parserSetup; public UnzipAndParseTask(ParseDataset job, CustomParser.ParserSetup parserSetup) { this(job,parserSetup, Integer.MAX_VALUE); } public UnzipAndParseTask(ParseDataset job, CustomParser.ParserSetup parserSetup, int maxParallelism) { _job = job; _parserSetup = parserSetup; } @Override public DRemoteTask dfork( Key... keys ) { _keys = keys; if(_parserSetup == null) _parserSetup = ParseDataset.guessSetup(Utils.getFirstUnzipedBytes(keys[0]))._setup; H2O.submitTask(this); return this; } static private class UnzipProgressMonitor implements ProgressMonitor { int _counter = 0; Key _progress; public UnzipProgressMonitor(Key progress){_progress = progress;} @Override public void update(long n) { n += _counter; if(n > (1 << 20)){ onProgress(n, _progress); _counter = 0; } else _counter = (int)n; } } // actual implementation of unzip and parse, intended for the FJ computation private class UnzipAndParseLocalTask extends H2OCountedCompleter { final int _idx; public UnzipAndParseLocalTask(int idx){ _idx = idx; setCompleter(UnzipAndParseTask.this); } protected DParseTask _p1; @Override public void compute2() { final Key key = _keys[_idx]; Value v = DKV.get(key); assert v != null; ParserSetup localSetup = ParseDataset.guessSetup(Utils.getFirstUnzipedBytes(v), _parserSetup,false)._setup; if(!_parserSetup.isCompatible(localSetup))throw new ParseException("Parsing incompatible files. 
" + _parserSetup.toString() + " is not compatible with " + localSetup.toString()); _fileInfo[_idx] = new FileInfo(); _fileInfo[_idx]._ikey = key; _fileInfo[_idx]._okey = key; if(localSetup._header &= _parserSetup._header) { assert localSetup._columnNames != null:"parsing " + key; assert _parserSetup._columnNames != null:"parsing " + key; for(int i = 0; i < _parserSetup._ncols; ++i) localSetup._header &= _parserSetup._columnNames[i].equalsIgnoreCase(localSetup._columnNames[i]); } _fileInfo[_idx]._header = localSetup._header; CustomParser parser = null; DParseTask dpt = null; switch(localSetup._pType){ case CSV: parser = new CsvParser(localSetup); dpt = new DParseTask(); break; case SVMLight: parser = new SVMLightParser(localSetup); dpt = new SVMLightDParseTask(); break; default: throw H2O.unimpl(); } long csz = v.length(); Compression comp = Utils.guessCompressionMethod(DKV.get(key).getFirstBytes()); if(comp != Compression.NONE){ onProgressSizeChange(csz,_job); // additional pass through the data to decompress InputStream is = null; InputStream ris = null; try { ris = v.openStream(new UnzipProgressMonitor(_job._progress)); switch(comp){ case ZIP: ZipInputStream zis = new ZipInputStream(ris); ZipEntry ze = zis.getNextEntry(); // There is at least one entry in zip file and it is not a directory. if (ze == null || ze.isDirectory()) throw new Exception("Unsupported zip file: " + ((ze == null) ? "No entry found": "Files containing directory are not supported.")); is = zis; break; case GZIP: is = new GZIPInputStream(ris); break; default: Log.info("Can't understand compression: _comp: "+ comp+" csz: "+csz+" key: "+key+" ris: "+ris); throw H2O.unimpl(); } _fileInfo[_idx]._okey = Key.make(new String(key._kb) + "_UNZIPPED"); ValueArray.readPut(_fileInfo[_idx]._okey, is,_job); v = DKV.get(_fileInfo[_idx]._okey); onProgressSizeChange(2*(v.length() - csz), _job); // the 2 passes will go over larger file! 
assert v != null; }catch (EOFException e){ if(ris != null && ris instanceof RIStream){ RIStream r = (RIStream)ris; System.err.println("Unexpected eof after reading " + r.off() + "bytes, expeted size = " + r.expectedSz()); } System.err.println("failed decompressing data " + key.toString() + " with compression " + comp); throw new RuntimeException(e); } catch (Throwable t) { System.err.println("failed decompressing data " + key.toString() + " with compression " + comp); throw new RuntimeException(t); } finally { Closeables.closeQuietly(is); } } _p1 = dpt.createPassOne(v, _job, parser); _p1.setCompleter(this); _p1.passOne(); // if(_parser instanceof CsvParser){ // CustomParser p2 = null; // gues parser hereInspect.csvGuessValue(v); // if(setup._data[0].length != _ncolumns) // throw new ParseException("Found conflicting number of columns (using separator " + (int)_sep + ") when parsing multiple files. Found " + setup._data[0].length + " columns in " + key + " , but expected " + _ncolumns); // _fileInfo[_idx]._header = setup._header; // if(_fileInfo[_idx]._header && _headers != null) // check if we have the header, it should be the same one as we got from the head // for(int i = 0; i < setup._data[0].length; ++i) // _fileInfo[_idx]._header = _fileInfo[_idx]._header && setup._data[0][i].equalsIgnoreCase(_headers[i]); // setup = new CsvParser.Setup(_sep, _fileInfo[_idx]._header, setup._data, setup._numlines, setup._bits); // _p1 = DParseTask.createPassOne(v, _job, _pType); // _p1.setCompleter(this); // _p1.passOne(setup); // DO NOT call tryComplete here, _p1 calls it! // } else { // _p1 = tryParseXls(v,_job); // if(_p1 == null) // throw new ParseException("Found conflicting types of files. 
Can not parse xls and not-xls files together"); // tryComplete(); } @Override public void onCompletion(CountedCompleter caller){ try{ _fileInfo[_idx]._nrows = _p1._nrows; long numRows = 0; for(int i = 0; i < _p1._nrows.length; ++i){ numRows += _p1._nrows[i]; _fileInfo[_idx]._nrows[i] = numRows; } }catch(Throwable t){t.printStackTrace();} quietlyComplete(); // wake up anyone who is joining on this task! } } @Override public void lcompute() { try{ _fileInfo = new FileInfo[_keys.length]; subTasks = new UnzipAndParseLocalTask[_keys.length]; setPendingCount(subTasks.length); int p = 0; int j = 0; for(int i = 0; i < _keys.length; ++i){ if(p == ParseDataset.PLIMIT) subTasks[j++].join(); else ++p; H2O.submitTask((subTasks[i] = new UnzipAndParseLocalTask(i))); } }catch(Throwable t){t.printStackTrace();} tryComplete(); } transient UnzipAndParseLocalTask [] subTasks; @Override public final void lonCompletion(CountedCompleter caller){ try{ _tsk = subTasks[0]._p1; for(int i = 1; i < _keys.length; ++i){ DParseTask tsk = subTasks[i]._p1; tsk._nrows = _tsk._nrows; _tsk.reduce(tsk); } }catch(Throwable t){t.printStackTrace();} } @Override public void reduce(DRemoteTask drt) { try{ UnzipAndParseTask tsk = (UnzipAndParseTask)drt; if(_tsk == null && _fileInfo == null){ _fileInfo = tsk._fileInfo; _tsk = tsk._tsk; } else { final int n = _fileInfo.length; _fileInfo = Arrays.copyOf(_fileInfo, n + tsk._fileInfo.length); System.arraycopy(tsk._fileInfo, 0, _fileInfo, n, tsk._fileInfo.length); // we do not want to merge nrows from different files, apart from that, we want to use standard reduce! tsk._tsk._nrows = _tsk._nrows; _tsk.reduce(tsk._tsk); } }catch(Throwable t){t.printStackTrace();} } } // True if the array is all NaNs static boolean allNaNs(double ds[]) { for( double d : ds ) if( !Double.isNaN(d) ) return false; return true; } // Progress (TODO count chunks in VA, unify with models?) 
/** Parse-progress record kept in the distributed store under the job's _progress key.
    _value = work completed so far, _total = expected total (both appear to be byte
    counts of parsed Values — confirm against callers of onProgress). */
static class Progress extends Iced {
    long _total;
    long _value;
    Progress(long val, long total){_value = val; _total = total;}
}

/** Fraction complete in [0,1]; 0 while the Progress record is absent or still empty. */
@Override public float progress() {
    Progress progress = UKV.get(_progress);
    if(progress == null || progress._total == 0) return 0;
    return progress._value / (float) progress._total;
}

/** Removes the progress record together with this object. */
@Override public void remove() {
    DKV.remove(_progress);
    super.remove();
}

/** Credits the byte length of the given chunk's Value to the progress record
    (silently skipped when the chunk has already been removed from DKV). */
static final void onProgress(final Key chunk, final Key progress) {
    assert progress != null;
    Value val = DKV.get(chunk);
    if (val == null) return;
    final long len = val.length();
    onProgress(len, progress);
}

/** Atomically adds {@code len} to the completed-work counter; no-op once the
    progress record has been deleted (e.g. the job finished or was cancelled). */
static final void onProgress(final long len, final Key progress) {
    new TAtomic<Progress>() {
        @Override public Progress atomic(Progress old) {
            if (old == null) return null;
            old._value += len;
            return old;
        }
    }.fork(progress);
}

/** Atomically grows the expected-total counter; no-op once the record is gone. */
static final void onProgressSizeChange(final long len, final ParseDataset job) {
    new TAtomic<Progress>() {
        @Override public Progress atomic(Progress old) {
            if (old == null) return null;
            old._total += len;
            return old;
        }
    }.fork(job._progress);
}
} // closes the enclosing class (its header is before this chunk)
package storm2014; import edu.wpi.first.wpilibj.ADXL345_I2C; import edu.wpi.first.wpilibj.AnalogChannel; import edu.wpi.first.wpilibj.Compressor; import edu.wpi.first.wpilibj.DriverStation; import storm2014.subsystems.DriveTrain; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Victor; import edu.wpi.first.wpilibj.command.Command; import edu.wpi.first.wpilibj.command.Scheduler; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.networktables.NetworkTable; import edu.wpi.first.wpilibj.smartdashboard.SendableChooser; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import storm2014.commands.DriveForward; import storm2014.commands.SetArmPosition; import storm2014.commands.autonomous.DriveAndShoot; import storm2014.commands.autonomous.DriveAndShoot2Ball; import storm2014.commands.autonomous.DriveAndShootNoWait; import storm2014.subsystems.Catapult; import storm2014.subsystems.Intake; import storm2014.subsystems.LEDRing; import storm2014.subsystems.LEDStrip; import storm2014.subsystems.StaticLEDStrip; import storm2014.utilities.pipeline.FilterTask; import storm2014.utilities.pipeline.ISource; import storm2014.utilities.pipeline.LowPassFilter; /** * This is the robot's "Main class" which is run by the VM. 
*/ //create allcellaromiter class, send data to dashboard in send sensor method in robot.java public class Robot extends IterativeRobot { // All subsystems are accessible by Robot.name public static OI oi; public static DriveTrain driveTrain; public static LEDStrip leds; public static Intake intake; public static Catapult catapult; public static LEDRing ledring; public static StaticLEDStrip staticleds; Compressor compressor; Command teleop; String[] autonomiceNames; Command[] autonomice; SendableChooser chooser = new SendableChooser(); Command autonomouse; private void sendSensorData() { SmartDashboard.putNumber("Gyro", driveTrain.getGyroAngle()); SmartDashboard.putNumber("Left Distance", driveTrain.getLeftDistance()); SmartDashboard.putNumber("Right Distance", driveTrain.getRightDistance()); SmartDashboard.putString("Gear", driveTrain.isHighgear() ? "High gear" : "Low gear"); SmartDashboard.putBoolean("Latch Engaged", catapult.isLatched()); SmartDashboard.putString("Arm mode", intake.getModeName()); SmartDashboard.putBoolean("Compressed", compressor.getPressureSwitchValue()); SmartDashboard.putBoolean("Arms down", intake.armSafe()); } /** Called on robot boot. 
*/ public void robotInit() { catapult = new Catapult(); driveTrain = new DriveTrain(); leds = new LEDStrip(); intake = new Intake(); ledring = new LEDRing(); staticleds = new StaticLEDStrip(); compressor = new Compressor(RobotMap.PORT_SWITCH_COMPRESSO, RobotMap.PORT_RELAY_COMPRESSOR); compressor.start(); // Initialize OI last so it doesn't try to access null subsystems oi = new OI(); SmartDashboard.putData("Arms out", new SetArmPosition(2)); SmartDashboard.putData("Arms in", new SetArmPosition(0)); // The names, and corresponding Commands of our autonomous modes autonomiceNames = new String[]{"Drive Forward","1 Ball Hot","1 Ball Blind","2 Ball"}; autonomice = new Command[]{new DriveForward(0.8, 5250),new DriveAndShoot(),new DriveAndShootNoWait(),new DriveAndShoot2Ball()}; // Configure and send the SendableChooser, which allows autonomous modes // to be chosen via radio button on the SmartDashboard System.out.println(autonomice.length + " autonomice"); for (int i = 0; i < autonomice.length; ++i) { chooser.addObject(autonomiceNames[i], autonomice[i]); } SmartDashboard.putData("Which Autonomouse?", chooser); SmartDashboard.putData(Scheduler.getInstance()); // Send sensor info to the SmartDashboard periodically new Command("Sensor feedback") { protected void initialize() {} protected void execute() { sendSensorData(); } protected boolean isFinished() { return false; } protected void end() {} protected void interrupted() { end(); } }.start(); leds.initTable(NetworkTable.getTable("SmartDashboard")); ledring.initTable(NetworkTable.getTable("SmartDashboard")); staticleds.initTable(NetworkTable.getTable("SmartDashboard")); } /** Called at the start of autonomous mode. 
*/ public void autonomousInit() { SmartDashboard.putBoolean("Enabled", true); if (teleop != null) { teleop.cancel(); } autonomouse = (Command) chooser.getSelected(); if (autonomouse != null) { autonomouse.start(); } leds.setMode(LEDStrip.USAMode); } /** * Called during autonomous whenever a new driver station packet arrives * (about every 1/50 of a second). */ public void autonomousPeriodic() { // Runs commands & stuff. Scheduler.getInstance().run(); } /** Called at the start of teleop mode. */ public void teleopInit() { SmartDashboard.putBoolean("Enabled", true); if (autonomouse != null) { autonomouse.cancel(); } if (teleop != null) { teleop.start(); } leds.setMode(LEDStrip.TeleopMode); System.out.println("Init teleop"); } /** * Called during teleop whenever a new driver station packet arrives (about * every 1/50 of a second).q */ public void teleopPeriodic() { // Runs commands & stuff Scheduler.getInstance().run(); DriverStation.Alliance color = DriverStation.getInstance().getAlliance(); if (color == DriverStation.Alliance.kBlue){ SmartDashboard.putBoolean("Blue Alliance?", true); staticleds.setRed((short) 0); staticleds.setGreen((short) 0); staticleds.setBlue((short) 255); } else if (color == DriverStation.Alliance.kRed){ SmartDashboard.putBoolean("Blue Alliance?", false); staticleds.setRed((short) 255); staticleds.setGreen((short) 0); staticleds.setBlue((short) 0); } else { SmartDashboard.putBoolean("Blue Alliance?", false); staticleds.setRed((short) 255); staticleds.setGreen((short) 0); staticleds.setBlue((short) 255); } } /** Called at the start of test mode */ public void testInit() { SmartDashboard.putBoolean("Enabled", false); leds.setMode(LEDStrip.StormSpiritMode); } /** * Called during test whenever a new driver station packet arrives (about * every 1/50 of a second). */ public void testPeriodic() { // Updates sensors & actuators on the LiveWindow LiveWindow.run(); } /** Called after any of the other modes ends. 
*/ public void disabledInit() { SmartDashboard.putBoolean("Enabled", false); if(autonomouse != null) { autonomouse.cancel(); } if(teleop != null) { teleop.cancel(); } leds.setMode(LEDStrip.DisabledMode); catapult.resetCatapult(); } double pulseCount = 0; /** * Called during disabled whenever a new driver station packet arrives * (about every 1/50 of a second). We only have it overridden so we don't * get "Override me!" messages. */ public void disabledPeriodic() { sendSensorData(); DriverStation.Alliance color = DriverStation.getInstance().getAlliance(); if (color == DriverStation.Alliance.kBlue){ staticleds.setRed((short) 0); staticleds.setGreen((short) 0); staticleds.setBlue((short) (255 * Math.sin(pulseCount))); } else if (color == DriverStation.Alliance.kRed){ staticleds.setRed((short) (255 * Math.sin(pulseCount))); staticleds.setGreen((short) 0); staticleds.setBlue((short) 0); } else { staticleds.setRed((short) (255 * Math.sin(pulseCount))); staticleds.setGreen((short) 0); staticleds.setBlue((short) (255 * Math.sin(pulseCount))); } pulseCount += Math.PI / 200; //Should take 8 seconds to pulse on and off } }
package org.neo4j.kernel.ha; import java.io.IOException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ClosedChannelException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.jboss.netty.handler.codec.frame.LengthFieldBasedFrameDecoder; import org.jboss.netty.handler.codec.frame.LengthFieldPrepender; import org.jboss.netty.handler.queue.BlockingReadHandler; import org.neo4j.helpers.Triplet; import org.neo4j.kernel.IdType; import org.neo4j.kernel.ha.zookeeper.Machine; import org.neo4j.kernel.impl.util.StringLogger; /** * The {@link Master} a slave should use to communicate with its master. It * serializes requests and sends them to the master, more specifically * {@link MasterServer} (which delegates to {@link MasterImpl} * on the master side. 
*/ public class MasterClient extends CommunicationProtocol implements Master, ChannelPipelineFactory { public static final int MAX_NUMBER_OF_CONCURRENT_REQUESTS_PER_CLIENT = 20; public static final int READ_RESPONSE_TIMEOUT_SECONDS = 20; private static final int MAX_NUMBER_OF_UNUSED_CHANNELS = 5; private final ClientBootstrap bootstrap; private final SocketAddress address; private final StringLogger msgLog; private final ExecutorService executor; private final ResourcePool<Triplet<Channel, ChannelBuffer, ByteBuffer>> channelPool = new ResourcePool<Triplet<Channel, ChannelBuffer, ByteBuffer>>( MAX_NUMBER_OF_CONCURRENT_REQUESTS_PER_CLIENT, MAX_NUMBER_OF_UNUSED_CHANNELS ) { @Override protected Triplet<Channel, ChannelBuffer, ByteBuffer> create() { ChannelFuture channelFuture = bootstrap.connect( address ); channelFuture.awaitUninterruptibly( 5, TimeUnit.SECONDS ); Triplet<Channel, ChannelBuffer, ByteBuffer> channel = null; if ( channelFuture.isSuccess() ) { channel = Triplet.of( channelFuture.getChannel(), ChannelBuffers.dynamicBuffer(), ByteBuffer.allocateDirect( 1024 * 1024 ) ); msgLog.logMessage( "Opened a new channel to " + address ); } return channel; } @Override protected boolean isAlive( Triplet<Channel, ChannelBuffer, ByteBuffer> resource ) { return resource.first().isConnected(); } @Override protected void dispose( Triplet<Channel, ChannelBuffer, ByteBuffer> resource ) { Channel channel = resource.first(); if ( channel.isConnected() ) channel.close(); } }; public MasterClient( String hostNameOrIp, int port, String storeDir ) { this.address = new InetSocketAddress( hostNameOrIp, port ); executor = Executors.newCachedThreadPool(); bootstrap = new ClientBootstrap( new NioClientSocketChannelFactory( executor, executor ) ); bootstrap.setPipelineFactory( this ); msgLog = StringLogger.getLogger( storeDir + "/messages.log" ); msgLog.logMessage( "Client connected to " + hostNameOrIp + ":" + port ); } public MasterClient( Machine machine, String storeDir ) { this( 
machine.getServer().first(), machine.getServer().other(), storeDir ); } private <T> Response<T> sendRequest( RequestType type, SlaveContext slaveContext, Serializer serializer, Deserializer<T> deserializer ) { Triplet<Channel, ChannelBuffer, ByteBuffer> channelContext = null; try { // Send 'em over the wire channelContext = getChannel(); Channel channel = channelContext.first(); ChannelBuffer buffer = channelContext.other(); buffer.clear(); buffer.writeByte( type.ordinal() ); if ( type.includesSlaveContext() ) { writeSlaveContext( buffer, slaveContext ); } serializer.write( buffer, channelContext.third() ); channel.write( buffer ); BlockingReadHandler<ChannelBuffer> reader = (BlockingReadHandler<ChannelBuffer>) channel.getPipeline().get( "blockingHandler" ); ChannelBuffer message = reader.read( READ_RESPONSE_TIMEOUT_SECONDS, TimeUnit.SECONDS ); if ( message == null ) { channelPool.dispose( channelContext ); throw new HaCommunicationException( "Channel has been closed" ); } T response = deserializer.read( message ); TransactionStreams txStreams = type.includesSlaveContext() ? 
readTransactionStreams( message ) : TransactionStreams.EMPTY; return new Response<T>( response, txStreams ); } catch ( ClosedChannelException e ) { channelPool.dispose( channelContext ); throw new HaCommunicationException( e ); } catch ( IOException e ) { throw new HaCommunicationException( e ); } catch ( InterruptedException e ) { throw new HaCommunicationException( e ); } catch ( Exception e ) { throw new HaCommunicationException( e ); } } private Triplet<Channel, ChannelBuffer, ByteBuffer> getChannel() throws Exception { return channelPool.acquire(); } private void releaseChannel() { channelPool.release(); } public IdAllocation allocateIds( final IdType idType ) { return sendRequest( RequestType.ALLOCATE_IDS, null, new Serializer() { public void write( ChannelBuffer buffer, ByteBuffer readBuffer ) throws IOException { buffer.writeByte( idType.ordinal() ); } }, new Deserializer<IdAllocation>() { public IdAllocation read( ChannelBuffer buffer ) throws IOException { return readIdAllocation( buffer ); } } ).response(); } public Response<Integer> createRelationshipType( SlaveContext context, final String name ) { return sendRequest( RequestType.CREATE_RELATIONSHIP_TYPE, context, new Serializer() { public void write( ChannelBuffer buffer, ByteBuffer readBuffer ) throws IOException { writeString( buffer, name ); } }, new Deserializer<Integer>() { @SuppressWarnings( "boxing" ) public Integer read( ChannelBuffer buffer ) throws IOException { return buffer.readInt(); } } ); } public Response<LockResult> acquireNodeWriteLock( SlaveContext context, long... nodes ) { return sendRequest( RequestType.ACQUIRE_NODE_WRITE_LOCK, context, new AcquireLockSerializer( nodes ), LOCK_RESULT_DESERIALIZER ); } public Response<LockResult> acquireNodeReadLock( SlaveContext context, long... 
nodes ) { return sendRequest( RequestType.ACQUIRE_NODE_READ_LOCK, context, new AcquireLockSerializer( nodes ), LOCK_RESULT_DESERIALIZER ); } public Response<LockResult> acquireRelationshipWriteLock( SlaveContext context, long... relationships ) { return sendRequest( RequestType.ACQUIRE_RELATIONSHIP_WRITE_LOCK, context, new AcquireLockSerializer( relationships ), LOCK_RESULT_DESERIALIZER ); } public Response<LockResult> acquireRelationshipReadLock( SlaveContext context, long... relationships ) { return sendRequest( RequestType.ACQUIRE_RELATIONSHIP_READ_LOCK, context, new AcquireLockSerializer( relationships ), LOCK_RESULT_DESERIALIZER ); } public Response<Long> commitSingleResourceTransaction( SlaveContext context, final String resource, final TransactionStream transactionStream ) { return sendRequest( RequestType.COMMIT, context, new Serializer() { public void write( ChannelBuffer buffer, ByteBuffer readBuffer ) throws IOException { writeString( buffer, resource ); writeTransactionStream(buffer, readBuffer, transactionStream); } }, new Deserializer<Long>() { @SuppressWarnings( "boxing" ) public Long read( ChannelBuffer buffer ) throws IOException { return buffer.readLong(); } }); } public Response<Void> finishTransaction( SlaveContext context ) { try { return sendRequest( RequestType.FINISH, context, new Serializer() { public void write( ChannelBuffer buffer, ByteBuffer readBuffer ) throws IOException { } }, VOID_DESERIALIZER ); } finally { releaseChannel(); } } public void rollbackOngoingTransactions( SlaveContext context ) { throw new UnsupportedOperationException( "Should never be called from the client side" ); } public Response<Void> pullUpdates( SlaveContext context ) { return sendRequest( RequestType.PULL_UPDATES, context, EMPTY_SERIALIZER, VOID_DESERIALIZER ); } public int getMasterIdForCommittedTx( final long txId ) { return sendRequest( RequestType.GET_MASTER_ID_FOR_TX, null, new Serializer() { public void write( ChannelBuffer buffer, ByteBuffer 
readBuffer ) throws IOException { buffer.writeLong( txId ); } }, INTEGER_DESERIALIZER ).response(); } public ChannelPipeline getPipeline() throws Exception { ChannelPipeline pipeline = Channels.pipeline(); pipeline.addLast( "frameDecoder", new LengthFieldBasedFrameDecoder( MAX_FRAME_LENGTH, 0, 4, 0, 4 ) ); pipeline.addLast( "frameEncoder", new LengthFieldPrepender( 4 ) ); BlockingReadHandler<ChannelBuffer> reader = new BlockingReadHandler<ChannelBuffer>(); pipeline.addLast( "blockingHandler", reader ); return pipeline; } public void shutdown() { msgLog.logMessage( "MasterClient shutdown" ); channelPool.close( true ); } }
/* NOTE(review): JMockit test-suite for dynamic partial mocking. The original
   formatting was collapsed onto long lines and the file is truncated mid-method at
   the end of this chunk, so the code is left byte-identical; review comments are
   inserted only at pre-existing line breaks. */
package mockit; import java.io.*; import java.lang.annotation.*; import java.lang.reflect.*; import java.util.*; import java.util.concurrent.*; import javax.xml.bind.annotation.*; import org.junit.*; import static org.junit.Assert.*; import mockit.internal.*; @SuppressWarnings("deprecation") public final class DynamicPartialMockingTest { @SuppressWarnings("unused") @Deprecated static class Collaborator { @Deprecated protected int value; Collaborator() { value = -1; } @Deprecated Collaborator(@Deprecated int value) { this.value = value; } final int getValue() { return value; } void setValue(int value) { this.value = value; } final boolean simpleOperation(int a, @XmlElement(name = "test") String b, Date c) { return true; } static void doSomething(boolean b, String s) { throw new IllegalStateException(); } @Ignore("test") boolean methodWhichCallsAnotherInTheSameClass() { return simpleOperation(1, "internal", null); } String overridableMethod() { return "base"; } @Deprecated native void nativeMethod(); void readFile(File f) {} private void initialize() {} } interface Dependency { boolean doSomething(); List<?> doSomethingElse(int n); } @Test public void dynamicallyMockAClass() { final Collaborator toBeMocked = new Collaborator(); new Expectations(Collaborator.class) {{ toBeMocked.getValue(); result = 123; }}; // Not mocked: Collaborator collaborator = new Collaborator(); assertEquals(-1, collaborator.value); assertTrue(collaborator.simpleOperation(1, "b", null)); assertEquals(45, new Collaborator(45).value); // Mocked: assertEquals(123, collaborator.getValue()); } @Test public void dynamicallyMockJREClass() throws Exception { new Expectations(ByteArrayOutputStream.class) {{ new ByteArrayOutputStream().size(); result = 123; }}; // Mocked: ByteArrayOutputStream collaborator = new ByteArrayOutputStream(); assertNull(Deencapsulation.getField(collaborator, "buf")); assertEquals(123, collaborator.size()); // Not mocked: ByteArrayOutputStream buf = new
/* (continues the non-mocked ByteArrayOutputStream construction) */
ByteArrayOutputStream(200); buf.write(65); assertEquals("A", buf.toString("UTF-8")); } @Test public void dynamicallyMockClassNonStrictly() { new NonStrictExpectations(Collaborator.class) {{ new Collaborator().getValue(); result = 123; }}; // Mocked: final Collaborator col1 = new Collaborator(); assertEquals(123, col1.getValue()); // Not mocked: final Collaborator col2 = new Collaborator(200); col2.setValue(45); assertEquals(45, col2.value); assertEquals(45, col2.getValue()); new Verifications() {{ col1.getValue(); times = 1; col2.getValue(); times = 1; Collaborator col2Equivalent = new Collaborator(200); times = 1; col2Equivalent.getValue(); times = 1; }}; } @Test public void mockOnlyTheFutureObjectsThatMatchASpecificConstructorInvocation() { final String path1 = "one"; // Not mocked: File f0 = new File(path1); assertFalse(f0.exists()); // Applies partial mocking to all instances. new NonStrictExpectations(File.class) {{ File anyFutureFileWithPath1 = new File(path1); anyFutureFileWithPath1.exists(); result = true; }}; // Mocked: File f1 = new File(path1); assertTrue(f1.exists()); // Not mocked: File f2 = new File("two"); assertFalse(f2.exists()); // Also mocked: File f3 = new File(path1); assertTrue(f3.exists()); // Full verification applies only to mocked instances. new FullVerifications() {{ File anyPastFileWithPath1 = new File(path1); anyPastFileWithPath1.exists(); times = 2; }}; // Invocations to non-mocked instances can also be verified (excluding those existing before mocking was applied).
new Verifications() {{ File anyOtherFile = new File(withNotEqual(path1)); anyOtherFile.exists(); times = 1; }}; } @Test public void verifyFutureMockedAndNonMockedObjectsInOrder() { final String path1 = "one"; new NonStrictExpectations(File.class) {{ File anyFutureFileWithPath1 = new File(path1); anyFutureFileWithPath1.exists(); result = true; }}; File f1 = new File(path1); assertTrue(f1.exists()); File f2 = new File("two"); assertFalse(f2.exists()); assertEquals("two", f2.getPath()); assertNull(f1.getPath()); File f3 = new File(path1); assertTrue(f3.exists()); new FullVerificationsInOrder() {{ File anyFileWithPath1 = new File(path1); anyFileWithPath1.exists(); File anyOtherFile = new File(withNotEqual(path1)); anyOtherFile.exists(); anyOtherFile.getPath(); anyFileWithPath1.getPath(); new File(path1); anyFileWithPath1.exists(); }}; } @Test public void dynamicallyMockAnInstance() { final Collaborator collaborator = new Collaborator(); new Expectations(collaborator) {{ collaborator.getValue(); result = 123; }}; // Mocked: assertEquals(123, collaborator.getValue()); // Not mocked: assertTrue(collaborator.simpleOperation(1, "b", null)); assertEquals(45, new Collaborator(45).value); assertEquals(-1, new Collaborator().value); } @Test(expected = MissingInvocation.class) public void expectTwoInvocationsOnStrictDynamicMockButReplayOnce() { final Collaborator collaborator = new Collaborator(); new Expectations(collaborator) {{ collaborator.getValue(); times = 2; }}; assertEquals(0, collaborator.getValue()); } @Test public void expectOneInvocationOnStrictDynamicMockButReplayTwice() { final Collaborator collaborator = new Collaborator(1); new Expectations(collaborator) {{ collaborator.methodWhichCallsAnotherInTheSameClass(); result = false; }}; // Mocked: assertFalse(collaborator.methodWhichCallsAnotherInTheSameClass()); // No longer mocked, since it's strict: assertTrue(collaborator.methodWhichCallsAnotherInTheSameClass()); } @Test public void
/* (strict-mock replay-count tests continue) */
expectTwoInvocationsOnStrictDynamicMockButReplayMoreTimes() { final Collaborator collaborator = new Collaborator(1); new Expectations(collaborator) {{ collaborator.getValue(); times = 2; }}; // Mocked: assertEquals(0, collaborator.getValue()); assertEquals(0, collaborator.getValue()); // No longer mocked, since it's strict and all expected invocations were already replayed: assertEquals(1, collaborator.getValue()); } @Test(expected = MissingInvocation.class) public void expectTwoOrderedInvocationsOnStrictDynamicMockButReplayOutOfOrder() { final Collaborator collaborator = new Collaborator(1); new Expectations(collaborator) {{ collaborator.setValue(1); collaborator.setValue(2); }}; // Not mocked since the first expectation that can be matched is the one setting the value to 1: collaborator.setValue(2); assertEquals(2, collaborator.value); // Mocked since the first expectation wasn't yet matched by a replayed one: collaborator.setValue(1); assertEquals(2, collaborator.value); // The recorded call to "setValue(2)" is missing at this point. } @Test(expected = UnexpectedInvocation.class) public void nonStrictDynamicMockFullyVerified_verifyOnlyOneOfMultipleRecordedInvocations() { final Collaborator collaborator = new Collaborator(0); new NonStrictExpectations(collaborator) {{ collaborator.setValue(1); collaborator.setValue(2); }}; collaborator.setValue(2); collaborator.setValue(1); // Verifies all the *mocked* (recorded) invocations, ignoring those not mocked: new FullVerifications() {{ collaborator.setValue(1); // Should also verify "setValue(2)" since it was recorded.
}}; } @Test public void nonStrictDynamicMockFullyVerified_verifyAllRecordedExpectationsButNotAllOfTheReplayedOnes() { final Collaborator collaborator = new Collaborator(0); new NonStrictExpectations(collaborator) {{ collaborator.setValue(1); }}; collaborator.setValue(1); collaborator.setValue(2); // Verifies all the *mocked* (recorded) invocations, ignoring those not mocked: new FullVerifications() {{ collaborator.setValue(1); // No need to verify "setValue(2)" since it was not recorded. }}; } @Test public void nonStrictDynamicMockFullyVerifiedInOrder_verifyAllRecordedExpectationsButNotAllOfTheReplayedOnes() { final Collaborator collaborator = new Collaborator(0); new NonStrictExpectations(collaborator) {{ collaborator.setValue(2); collaborator.setValue(3); }}; collaborator.setValue(1); collaborator.setValue(2); collaborator.setValue(3); // Verifies all the *mocked* (recorded) invocations, ignoring those not mocked: new FullVerificationsInOrder() {{ // No need to verify "setValue(1)" since it was not recorded.
collaborator.setValue(2); collaborator.setValue(3); }}; } @Test public void nonStrictDynamicallyMockedClassFullyVerified_verifyRecordedExpectationButNotReplayedOne() { final Collaborator collaborator = new Collaborator(); new NonStrictExpectations(Collaborator.class) {{ collaborator.simpleOperation(1, "internal", null); result = false; }}; assertFalse(collaborator.methodWhichCallsAnotherInTheSameClass()); new FullVerifications() {{ collaborator.simpleOperation(anyInt, anyString, null); }}; } @Test(expected = MissingInvocation.class) public void expectTwoInvocationsOnNonStrictDynamicMockButReplayOnce() { final Collaborator collaborator = new Collaborator(); new NonStrictExpectations(collaborator) {{ collaborator.getValue(); times = 2; }}; assertEquals(0, collaborator.getValue()); } @Test(expected = UnexpectedInvocation.class) public void expectOneInvocationOnNonStrictDynamicMockButReplayTwice() { final Collaborator collaborator = new Collaborator(1); new NonStrictExpectations(collaborator) {{ collaborator.getValue(); times = 1; }}; // Mocked: assertEquals(0, collaborator.getValue()); // Still mocked because it's non-strict: assertEquals(0, collaborator.getValue()); } @Test public void dynamicallyMockAnInstanceWithNonStrictExpectations() { final Collaborator collaborator = new Collaborator(2); new NonStrictExpectations(collaborator) {{ collaborator.simpleOperation(1, "", null); result = false; Collaborator.doSomething(anyBoolean, "test"); }}; // Mocked: assertFalse(collaborator.simpleOperation(1, "", null)); Collaborator.doSomething(true, "test"); // Not mocked: assertEquals(2, collaborator.getValue()); assertEquals(45, new Collaborator(45).value); assertEquals(-1, new Collaborator().value); try { Collaborator.doSomething(false, null); fail(); } catch (IllegalStateException ignore) {} new Verifications() {{ Collaborator.doSomething(anyBoolean, "test"); collaborator.getValue(); times = 1; }}; } @Test public void mockMethodInSameClass() { final Collaborator
collaborator = new Collaborator(); new NonStrictExpectations(collaborator) {{ collaborator.simpleOperation(1, anyString, null); result = false; }}; assertFalse(collaborator.methodWhichCallsAnotherInTheSameClass()); assertTrue(collaborator.simpleOperation(2, "", null)); assertFalse(collaborator.simpleOperation(1, "", null)); } static final class SubCollaborator extends Collaborator { SubCollaborator() { this(1); } SubCollaborator(int value) { super(value); } @Override String overridableMethod() { return super.overridableMethod() + " overridden"; } String format() { return String.valueOf(value); } static void causeFailure() { throw new RuntimeException(); } } @Test public void dynamicallyMockASubCollaboratorInstance() { final SubCollaborator collaborator = new SubCollaborator(); new NonStrictExpectations(collaborator) {{ collaborator.getValue(); result = 5; collaborator.format(); result = "test"; SubCollaborator.causeFailure(); }}; // Mocked: assertEquals(5, collaborator.getValue()); SubCollaborator.causeFailure(); // Not mocked: assertTrue(collaborator.simpleOperation(0, null, null)); // not recorded assertEquals("1", new SubCollaborator().format()); // was recorded but on a different instance try { Collaborator.doSomething(true, null); // not recorded fail(); } catch (IllegalStateException ignore) {} } @Test public void dynamicallyMockClassHierarchyForSpecifiedSubclass() { final SubCollaborator collaborator = new SubCollaborator(); new NonStrictExpectations(SubCollaborator.class) {{ collaborator.getValue(); result = 123; collaborator.format(); result = "test"; }}; // Mocked: assertEquals("test", collaborator.format()); assertEquals(123, collaborator.getValue()); // Not mocked: assertTrue(collaborator.simpleOperation(0, null, null)); // Mocked sub-constructor/not mocked base constructor: assertEquals(-1, new SubCollaborator().value); new VerificationsInOrder() {{ collaborator.format(); new SubCollaborator(); }}; } @Test public void
/* (override/anonymous-class/JRE-instance mocking tests continue) */
mockTheBaseMethodWhileExercisingTheOverride() { final Collaborator collaborator = new Collaborator(); new Expectations(Collaborator.class) {{ collaborator.overridableMethod(); result = ""; collaborator.overridableMethod(); result = "mocked"; }}; assertEquals("", collaborator.overridableMethod()); assertEquals("mocked overridden", new SubCollaborator().overridableMethod()); } @Test public void dynamicallyMockAnAnonymousClassInstanceThroughTheImplementedInterface() { final Collaborator collaborator = new Collaborator(); final Dependency dependency = new Dependency() { @Override public boolean doSomething() { return false; } @Override public List<?> doSomethingElse(int n) { return null; } }; new NonStrictExpectations(collaborator, dependency) {{ collaborator.getValue(); result = 5; dependency.doSomething(); result = true; }}; // Mocked: assertEquals(5, collaborator.getValue()); assertTrue(dependency.doSomething()); // Not mocked: assertTrue(collaborator.simpleOperation(0, null, null)); assertNull(dependency.doSomethingElse(3)); new FullVerifications() {{ dependency.doSomething(); collaborator.getValue(); dependency.doSomethingElse(anyInt); collaborator.simpleOperation(0, null, null); }}; } @Test public void dynamicallyMockInstanceOfJREClass() { final List<String> list = new LinkedList<String>(); @SuppressWarnings("UseOfObsoleteCollectionType") List<String> anotherList = new Vector<String>(); new NonStrictExpectations(list, anotherList) {{ list.get(1); result = "an item"; list.size(); result = 2; }}; // Use mocked methods: assertEquals(2, list.size()); assertEquals("an item", list.get(1)); // Use unmocked methods: assertTrue(list.add("another")); assertEquals("another", list.remove(0)); anotherList.add("one"); assertEquals("one", anotherList.get(0)); assertEquals(1, anotherList.size()); } public interface AnotherInterface {} @Test public void attemptToUseDynamicMockingForInvalidTypes(@Mocked AnotherInterface mockedInterface) {
assertInvalidTypeForDynamicPartialMocking(Dependency.class); assertInvalidTypeForDynamicPartialMocking(Test.class); assertInvalidTypeForDynamicPartialMocking(int[].class); assertInvalidTypeForDynamicPartialMocking(new String[1]); assertInvalidTypeForDynamicPartialMocking(char.class); assertInvalidTypeForDynamicPartialMocking(123); assertInvalidTypeForDynamicPartialMocking(Boolean.class); assertInvalidTypeForDynamicPartialMocking(true); assertInvalidTypeForDynamicPartialMocking(2.5); assertInvalidTypeForDynamicPartialMocking(mockedInterface); Dependency mockInstance = new MockUp<Dependency>() {}.getMockInstance(); assertInvalidTypeForDynamicPartialMocking(mockInstance); } private void assertInvalidTypeForDynamicPartialMocking(Object classOrObject) { try { new Expectations(classOrObject) {}; fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("partial mocking")); } } @Test public void dynamicPartialMockingWithExactArgumentMatching() { final Collaborator collaborator = new Collaborator(); new NonStrictExpectations(collaborator) {{ collaborator.simpleOperation(1, "s", null); result = false; }}; assertFalse(collaborator.simpleOperation(1, "s", null)); assertTrue(collaborator.simpleOperation(2, "s", null)); assertTrue(collaborator.simpleOperation(1, "S", null)); assertTrue(collaborator.simpleOperation(1, "s", new Date())); assertTrue(collaborator.simpleOperation(1, null, new Date())); assertFalse(collaborator.simpleOperation(1, "s", null)); new FullVerifications() {{ collaborator.simpleOperation(anyInt, null, null); }}; } @Test public void dynamicPartialMockingWithFlexibleArgumentMatching() { final Collaborator mock = new Collaborator(); new NonStrictExpectations(mock) {{ mock.simpleOperation(anyInt, withPrefix("s"), null); result = false; }}; assertFalse(mock.simpleOperation(1, "sSs", null)); assertTrue(mock.simpleOperation(2, " s", null)); assertTrue(mock.simpleOperation(1, "S", null)); assertFalse(mock.simpleOperation(-1, "s", new
Date())); assertTrue(mock.simpleOperation(1, null, null)); assertFalse(mock.simpleOperation(0, "string", null)); Collaborator collaborator = new Collaborator(); assertTrue(collaborator.simpleOperation(1, "sSs", null)); assertTrue(collaborator.simpleOperation(-1, null, new Date())); } @Test public void dynamicPartialMockingWithInstanceSpecificMatching() { final Collaborator collaborator1 = new Collaborator(); final Collaborator collaborator2 = new Collaborator(4); new NonStrictExpectations(collaborator1, collaborator2) {{ collaborator1.getValue(); result = 3; }}; assertEquals(3, collaborator1.getValue()); assertEquals(4, collaborator2.getValue()); new FullVerificationsInOrder() {{ collaborator1.getValue(); times = 1; collaborator2.getValue(); times = 1; }}; } @Test public void dynamicPartialMockingWithInstanceSpecificMatchingOnTwoInstancesOfSameClass() { final Collaborator mock1 = new Collaborator(); final Collaborator mock2 = new Collaborator(); new NonStrictExpectations(mock1, mock2) {{ mock1.getValue(); result = 1; mock2.getValue(); result = 2; }}; assertEquals(2, mock2.getValue()); assertEquals(1, mock1.getValue()); new FullVerifications() {{ mock1.getValue(); times = 1; mock2.getValue(); times = 1; }}; } @Test public void methodWithNoRecordedExpectationCalledTwiceDuringReplay() { final Collaborator collaborator = new Collaborator(123); new NonStrictExpectations(collaborator) {}; assertEquals(123, collaborator.getValue()); assertEquals(123, collaborator.getValue()); new FullVerifications() {{ collaborator.getValue(); times = 2; }}; } static final class TaskWithConsoleInput { boolean finished; void doIt() { int input = '\0'; while (input != 'A') { try { input = System.in.read(); } catch (IOException e) { throw new RuntimeException(e); } if (input == 'Z') { finished = true; break; } } } } private boolean runTaskWithTimeout(long timeoutInMillis) throws InterruptedException, ExecutionException { final TaskWithConsoleInput task = new TaskWithConsoleInput(); Runnable
/* NOTE(review): chunk ends mid-definition of runTaskWithTimeout(); the remainder of
   the method (and of the class) lies outside this view. */
asynchronousTask = new Runnable() { @Override public void run() { task.doIt(); } }; ExecutorService executor = Executors.newSingleThreadExecutor(); while (!task.finished) { Future<?> worker = executor.submit(asynchronousTask); try { worker.get(timeoutInMillis, TimeUnit.MILLISECONDS); } catch (TimeoutException ignore) { executor.shutdownNow(); return false; } } return true; } @Test public void taskWithConsoleInputTerminatingNormally() throws Exception { new Expectations(System.in) {{ System.in.read(); returns((int) 'A', (int) 'x', (int) 'Z'); }}; assertTrue(runTaskWithTimeout(5000)); } @Test public void taskWithConsoleInputTerminatingOnTimeout() throws Exception { new Expectations(System.in) {{ System.in.read(); result = new Delegate() { @Mock void takeTooLong() throws InterruptedException { Thread.sleep(5000); } }; }}; assertFalse("no timeout", runTaskWithTimeout(10)); } static class ClassWithStaticInitializer { static boolean initialized = true; static int doSomething() { return initialized ? 1 : -1; } } @Test public void doNotStubOutStaticInitializersWhenDynamicallyMockingAClass() { new Expectations(ClassWithStaticInitializer.class) {{ ClassWithStaticInitializer.doSomething(); result = 2; }}; assertEquals(2, ClassWithStaticInitializer.doSomething()); assertTrue(ClassWithStaticInitializer.initialized); } static final class ClassWithNative { int doSomething() { return nativeMethod(); } private native int nativeMethod(); } @Test(expected = UnsatisfiedLinkError.class) public void attemptToPartiallyMockNativeMethod() { final ClassWithNative mock = new ClassWithNative(); new Expectations(mock) {{ // The native method is ignored when using dynamic mocking, so this actually tries to execute the real method, // failing since there is no native implementation. 
mock.nativeMethod(); }}; } @Test // with FileIO compiled with "target 1.1", this produced a VerifyError public void mockClassCompiledForJava11() throws Exception { final FileIO f = new FileIO(); new Expectations(f) {{ f.writeToFile("test"); }}; f.writeToFile("test"); } static class Base { Base(boolean b) { if (!b) throw new IllegalAccessError(); } } static class Derived extends Base { Derived() { super(true); } } @Ignore @Test public void mockConstructorsInClassHierarchyWithMockedCallToSuperWhichChecksArgumentReceived() { new Expectations(Derived.class) {}; new Derived(); } static class Base2 { final int i; Base2(int i) { this.i = i; } } static class Derived2 extends Base2 { Derived2(int i) { super(i); } } @Ignore @Test public void mockConstructorsInClassHierarchyWithMockedCallToSuper() { new NonStrictExpectations(Derived2.class) {}; Derived2 d = new Derived2(123); assertEquals(123, d.i); } static class AClass { static int i = -1; AClass() { this(123); } AClass(int i) { AClass.i = i; } } @Ignore @Test public void mockConstructorsInSingleClassWithMockedCallToThis() { new NonStrictExpectations(AClass.class) {}; new AClass(); assertEquals(123, AClass.i); } @Test public void mockedClassWithAnnotatedElements() throws Exception { new NonStrictExpectations(Collaborator.class) {}; Collaborator mock = new Collaborator(123); Class<?> mockedClass = mock.getClass(); assertTrue(mockedClass.isAnnotationPresent(Deprecated.class)); assertTrue(mockedClass.getDeclaredField("value").isAnnotationPresent(Deprecated.class)); Method mockedMethod1 = mockedClass.getDeclaredMethod("simpleOperation", int.class, String.class, Date.class); Annotation xmlElement = mockedMethod1.getParameterAnnotations()[1][0]; assertTrue(xmlElement instanceof XmlElement); assertEquals("test", ((XmlElement) xmlElement).name()); Constructor<?> mockedConstructor = mockedClass.getDeclaredConstructor(int.class); assertTrue(mockedConstructor.isAnnotationPresent(Deprecated.class)); 
assertTrue(mockedConstructor.getParameterAnnotations()[0][0] instanceof Deprecated); Method mockedMethod2 = mockedClass.getDeclaredMethod("methodWhichCallsAnotherInTheSameClass"); Ignore ignore = mockedMethod2.getAnnotation(Ignore.class); assertNotNull(ignore); assertEquals("test", ignore.value()); assertTrue(mockedClass.getDeclaredMethod("nativeMethod").isAnnotationPresent(Deprecated.class)); } @Test public void regularMockedMethodCallingOverriddenEqualsInDynamicallyMockedClass(@Mocked final Collaborator mock) { @SuppressWarnings("TooBroadScope") final File f = new File("test"); new NonStrictExpectations(File.class) {}; mock.readFile(new File("test")); new Verifications() {{ mock.readFile(f); }}; } static final class TestedClass { private boolean value; TestedClass() { this(true); } TestedClass(boolean value) { initialize(value); } private void initialize(boolean flag) { value = flag; } } @Test public void mockClassWithConstructorWhichCallsPrivateMethod() { new NonStrictExpectations(TestedClass.class) {}; assertTrue(new TestedClass(true).value); final TestedClass t = new TestedClass(false); assertFalse(t.value); new Verifications() {{ new TestedClass(anyBoolean); times = 2; t.initialize(anyBoolean); times = 2; }}; } }
package com.python.pydev.refactoring.markoccurrences;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.eclipse.core.runtime.AssertionFailedException;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ISynchronizable;
import org.eclipse.jface.text.Position;
import org.eclipse.jface.text.source.Annotation;
import org.eclipse.jface.text.source.IAnnotationModel;
import org.eclipse.jface.text.source.IAnnotationModelExtension;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.texteditor.IDocumentProvider;
import org.python.pydev.core.Tuple3;
import org.python.pydev.core.docutils.PySelection;
import org.python.pydev.core.log.Log;
import org.python.pydev.editor.PyEdit;
import org.python.pydev.editor.actions.refactoring.PyRefactorAction;
import org.python.pydev.editor.codefolding.PySourceViewer;
import org.python.pydev.editor.refactoring.AbstractPyRefactoring;
import org.python.pydev.editor.refactoring.IPyRefactoring;
import org.python.pydev.editor.refactoring.RefactoringRequest;
import org.python.pydev.parser.jython.SimpleNode;
import org.python.pydev.parser.jython.ast.Name;
import org.python.pydev.parser.visitors.scope.ASTEntry;

import com.python.pydev.PydevPlugin;
import com.python.pydev.refactoring.refactorer.AstEntryRefactorerRequestConstants;
import com.python.pydev.refactoring.ui.MarkOccurrencesPreferencesPage;
import com.python.pydev.refactoring.wizards.rename.PyRenameEntryPoint;

/**
 * This is a 'low-priority' thread. It acts as a singleton. Requests to mark the occurrences
 * will be forwarded to it, so, it should sleep for a while and then check for a request.
 *
 * If the request actually happened, it will go on to process it, otherwise it will sleep some more.
 *
 * @author Fabio
 */
public class MarkOccurrencesJob extends Job{

    private static final boolean DEBUG = false;

    private static MarkOccurrencesJob singleton;

    /**
     * Make it thread safe
     */
    private static volatile long lastRequestTime = -1;

    /**
     * This is the editor to be analyzed
     */
    private WeakReference<PyEdit> editor;

    /**
     * This is the request time for this job
     */
    private long currRequestTime = -1;

    private MarkOccurrencesJob(WeakReference<PyEdit> editor) {
        super("MarkOccurrencesJob");
        setPriority(Job.BUILD);
        setSystem(true);
        this.editor = editor;
        currRequestTime = System.currentTimeMillis();
    }

    /**
     * Main job entry point: bails out early whenever the editor disappeared, the monitor
     * was cancelled, or a newer request superseded this one; otherwise computes the
     * occurrence annotations and applies (or removes) them on the editor's annotation model.
     */
    @SuppressWarnings("unchecked")
    public IStatus run(IProgressMonitor monitor) {
        if(currRequestTime == -1){
            return Status.OK_STATUS;
        }
        if(currRequestTime == lastRequestTime){
            return Status.OK_STATUS;
        }
        lastRequestTime = currRequestTime;

        try {
            final PyEdit pyEdit = editor.get();

            if(pyEdit == null || monitor.isCanceled()){
                return Status.OK_STATUS;
            }
            try{
                IDocumentProvider documentProvider = pyEdit.getDocumentProvider();
                if(documentProvider == null || monitor.isCanceled()){
                    return Status.OK_STATUS;
                }

                IAnnotationModel annotationModel= documentProvider.getAnnotationModel(pyEdit.getEditorInput());
                if(annotationModel == null || monitor.isCanceled()){
                    return Status.OK_STATUS;
                }

                Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean> ret = checkAnnotations(pyEdit, documentProvider, monitor);

                if(pyEdit.cache == null || monitor.isCanceled()){
                    //disposed (cannot add or remove annotations)
                    return Status.OK_STATUS;
                }

                PySourceViewer viewer = pyEdit.getPySourceViewer();
                if(viewer == null || monitor.isCanceled()){
                    return Status.OK_STATUS;
                }
                if(viewer.getIsInToggleCompletionStyle() || monitor.isCanceled()){
                    return Status.OK_STATUS;
                }

                if(ret.o3){
                    if(!addAnnotations(pyEdit, annotationModel, ret.o1, ret.o2)){
                        //something went wrong, so, let's remove the occurrences
                        removeOccurenceAnnotations(annotationModel, pyEdit);
                    }
                }else{
                    removeOccurenceAnnotations(annotationModel, pyEdit);
                }

            } catch (OperationCanceledException e) {
                throw e;//rethrow this error...
            } catch (AssertionFailedException e) {
                String message = e.getMessage();
                if(message.indexOf("The file:") != -1 && message.indexOf("does not exist.") != -1){
                    //don't even report it (the file was probably removed while we were doing the analysis)
                }else{
                    Log.log(e);
                    Log.log("Error while analyzing the file:"+pyEdit.getIFile());
                }
            } catch (Throwable initialE) {
                //walk the cause chain looking for a BadLocationException, which is expected
                //when the document changes during the analysis
                Throwable e = initialE;
                int i = 0;
                while(e.getCause() != null && e.getCause() != e && i < 30){
                    e = e.getCause();
                    i++;//safeguard for recursion
                }
                if(e instanceof BadLocationException){
                    //ignore (may have changed during the analysis)
                }else{
                    Log.log(initialE);
                    Log.log("Error while analyzing the file:"+pyEdit.getIFile());
                }
            }

        } catch (Throwable e) {
            // Log.log(e); -- ok, remove this log, as things can happen if the user starts editing after the analysis is requested
        }
        return Status.OK_STATUS;
    }

    /**
     * @return a tuple with the refactoring request, the processor and a boolean indicating if all pre-conditions succeeded.
     */
    private Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean> checkAnnotations(PyEdit pyEdit,
            IDocumentProvider documentProvider, IProgressMonitor monitor)
            throws BadLocationException, OperationCanceledException, CoreException {
        if(!MarkOccurrencesPreferencesPage.useMarkOccurrences()){
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
        }

        //now, let's see if the editor still has a document (so that we still can add stuff to it)
        IEditorInput editorInput = pyEdit.getEditorInput();
        if(editorInput == null){
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
        }

        if(documentProvider.getDocument(editorInput) == null){
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
        }

        if(pyEdit.getSelectionProvider() == null){
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
        }

        //ok, the editor is still there with a document... move on
        PyRefactorAction pyRefactorAction = getRefactorAction(pyEdit);

        final RefactoringRequest req = getRefactoringRequest(pyEdit, pyRefactorAction);

        if(req == null || !req.nature.getRelatedInterpreterManager().isConfigured()){
            //we check if it's configured because it may still be a stub...
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
        }

        PyRenameEntryPoint processor = new PyRenameEntryPoint(req);

        //to see if a new request was not created in the meantime (in which case this one will be cancelled)
        if (currRequestTime != lastRequestTime || monitor.isCanceled()) {
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
        }

        try{
            processor.checkInitialConditions(monitor);
            if (currRequestTime != lastRequestTime || monitor.isCanceled()) {
                return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
            }

            processor.checkFinalConditions(monitor, null);
            if (currRequestTime != lastRequestTime || monitor.isCanceled()) {
                return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(null,null,false);
            }

            //ok, pre-conditions succeeded
            return new Tuple3<RefactoringRequest,PyRenameEntryPoint,Boolean>(req,processor,true);
        }catch(Throwable e){
            throw new RuntimeException("Error in occurrences while analyzing modName:"+req.moduleName+
                    " initialName:"+req.initialName+" line (start at 0):"+req.ps.getCursorLine(), e);
        }
    }

    /**
     * @return true if the annotations were removed and added without any problems and false otherwise
     */
    private synchronized boolean addAnnotations(final PyEdit pyEdit, IAnnotationModel annotationModel,
            final RefactoringRequest req, PyRenameEntryPoint processor) throws BadLocationException {
        //add the annotations
        synchronized (getLockObject(annotationModel)) {
            List<ASTEntry> occurrences = processor.getOccurrences();
            if(occurrences != null){
                Map<String, Object> cache = pyEdit.cache;
                if(cache == null){
                    return false;
                }
                IDocument doc = pyEdit.getDocument();
                ArrayList<Annotation> annotations = new ArrayList<Annotation>();
                Map<Annotation, Position> toAddAsMap = new HashMap<Annotation, Position>();
                boolean markOccurrencesInStrings = MarkOccurrencesPreferencesPage.useMarkOccurrencesInStrings();

                for (ASTEntry entry : occurrences) {
                    if(!markOccurrencesInStrings){
                        //skip artificial Name nodes (occurrences inside strings/comments)
                        if(entry.node instanceof Name){
                            Name name = (Name) entry.node;
                            if(name.ctx == Name.Artificial){
                                continue;
                            }
                        }
                    }

                    SimpleNode node = entry.getNameNode();
                    IRegion lineInformation = doc.getLineInformation(node.beginLine-1);

                    try {
                        Annotation annotation = new Annotation(PydevPlugin.OCCURRENCE_ANNOTATION_TYPE, false, "occurrence");
                        Position position = new Position(lineInformation.getOffset() + node.beginColumn - 1, req.initialName.length());
                        toAddAsMap.put(annotation, position);
                        annotations.add(annotation);
                    } catch (Exception e) {
                        Log.log(e);
                    }
                }

                //get the ones to remove
                List<Annotation> toRemove = PydevPlugin.getOccurrenceAnnotationsInPyEdit(pyEdit);

                //replace them
                IAnnotationModelExtension ext = (IAnnotationModelExtension) annotationModel;
                ext.replaceAnnotations(toRemove.toArray(new Annotation[0]), toAddAsMap);

                //put them in the pyEdit
                cache.put(PydevPlugin.ANNOTATIONS_CACHE_KEY, annotations);

            }else{
                if(DEBUG){
                    System.out.println("Occurrences == null");
                }
                return false;
            }
        }
        return true;
    }

    public static RefactoringRequest getRefactoringRequest(final PyEdit pyEdit, PyRefactorAction pyRefactorAction) throws BadLocationException {
        return getRefactoringRequest(pyEdit, pyRefactorAction, null);
    }

    /**
     * @param pyEdit the editor where we should look for the occurrences
     * @param pyRefactorAction the action that will return the initial refactoring request
     * @param ps the pyselection used (if null it will be created in this method)
     *           NOTE(review): this parameter is currently unused by the implementation —
     *           the selection is always recreated from the editor; confirm intent.
     * @return a refactoring request suitable for finding the locals in the file
     * @throws BadLocationException
     */
    public static RefactoringRequest getRefactoringRequest(final PyEdit pyEdit, PyRefactorAction pyRefactorAction, PySelection ps) throws BadLocationException {
        final RefactoringRequest req = pyRefactorAction.getRefactoringRequest();
        req.ps = PySelection.createFromNonUiThread(pyEdit);
        if(req.ps == null){
            return null;
        }
        req.fillInitialNameAndOffset();
        req.inputName = "foo";
        req.setAdditionalInfo(AstEntryRefactorerRequestConstants.FIND_DEFINITION_IN_ADDITIONAL_INFO, false);
        req.setAdditionalInfo(AstEntryRefactorerRequestConstants.FIND_REFERENCES_ONLY_IN_LOCAL_SCOPE, true);
        return req;
    }

    /**
     * @param pyEdit the editor that will have this action
     * @return the action (with the pyedit attached to it)
     */
    public static PyRefactorAction getRefactorAction(PyEdit pyEdit) {
        PyRefactorAction pyRefactorAction = new PyRefactorAction(){

            @Override
            protected IPyRefactoring getPyRefactoring() {
                return AbstractPyRefactoring.getPyRefactoring();
            }

            @Override
            protected String perform(IAction action, String name, IProgressMonitor monitor) throws Exception {
                throw new RuntimeException("Perform should not be called in this case.");
            }

            @Override
            protected String getInputMessage() {
                return null;
            }
        };
        pyRefactorAction.setEditor(pyEdit);
        return pyRefactorAction;
    }

    /**
     * @param annotationModel
     */
    @SuppressWarnings("unchecked")
    private synchronized void removeOccurenceAnnotations(IAnnotationModel annotationModel, PyEdit pyEdit) {
        //remove the annotations
        synchronized(getLockObject(annotationModel)){
            Map<String, Object> cache = pyEdit.cache;
            if(cache == null){
                return;
            }

            Iterator<Annotation> annotationIterator = PydevPlugin.getOccurrenceAnnotationsInPyEdit(pyEdit).iterator();
            while(annotationIterator.hasNext()){
                annotationModel.removeAnnotation(annotationIterator.next());
            }
            cache.put(PydevPlugin.ANNOTATIONS_CACHE_KEY, null);
        }
        //end remove the annotations
    }

    /**
     * Gotten from JavaEditor#getLockObject
     */
    private Object getLockObject(IAnnotationModel annotationModel) {
        if (annotationModel instanceof ISynchronizable)
            return ((ISynchronizable)annotationModel).getLockObject();
        else
            return annotationModel;
    }

    /**
     * This is the function that should be called when we want to schedule a request for
     * a mark occurrences job.
     */
    public static synchronized void scheduleRequest(WeakReference<PyEdit> editor2) {
        MarkOccurrencesJob j = singleton;
        if(j != null){
            synchronized (j) {
                j.cancel();
                singleton = null;
            }
        }
        singleton = new MarkOccurrencesJob(editor2);
        singleton.schedule(750);
    }
}
package test.beast.app.beauti;

import java.io.File;

import org.fest.swing.fixture.JTabbedPaneFixture;
import org.junit.Test;

public class SimpleTreePriorTest extends BeautiBase {

    /** check the standard tree priors are there and result in correct behaviour **/
    @Test
    public void simpleTreePriorTest() throws Exception {
        warning("Load anolis.nex");
        importAlignment("examples/nexus", new File("anolis.nex"));

        JTabbedPaneFixture tabs = beautiFrame.tabbedPane();
        tabs.selectTab("Priors");

        // Constant-size coalescent: a single population-size parameter plus its scaler.
        warning("Change to Coalescent - constant population");
        beautiFrame.comboBox("TreeDistribution").selectItem("Coalescent Constant Population");
        printBeautiState(tabs);
        assertStateEquals("Tree.t:anolis", "popSize.t:anolis");
        assertOperatorsEqual("treeScaler.t:anolis", "treeRootScaler.t:anolis", "UniformOperator.t:anolis",
                "SubtreeSlide.t:anolis", "narrow.t:anolis", "wide.t:anolis", "WilsonBalding.t:anolis",
                "PopSizeScaler.t:anolis");
        assertPriorsEqual("CoalescentConstant.t:anolis", "PopSizePrior.t:anolis");
        assertTraceLogEqual("posterior", "likelihood", "prior", "treeLikelihood.anolis", "TreeHeight.t:anolis",
                "popSize.t:anolis", "CoalescentConstant.t:anolis");

        // Exponential-growth coalescent: population size and growth rate.
        warning("Change to Coalescent - exponential population");
        beautiFrame.comboBox("TreeDistribution").selectItem("Coalescent Exponential Population");
        printBeautiState(tabs);
        assertStateEquals("Tree.t:anolis", "ePopSize.t:anolis", "growthRate.t:anolis");
        assertOperatorsEqual("treeScaler.t:anolis", "treeRootScaler.t:anolis", "UniformOperator.t:anolis",
                "SubtreeSlide.t:anolis", "narrow.t:anolis", "wide.t:anolis", "WilsonBalding.t:anolis",
                "ePopSizeScaler.t:anolis", "GrowthRateRandomWalk.t:anolis");
        assertPriorsEqual("CoalescentExponential.t:anolis", "ePopSizePrior.t:anolis", "GrowthRatePrior.t:anolis");
        assertTraceLogEqual("posterior", "likelihood", "prior", "treeLikelihood.anolis", "TreeHeight.t:anolis",
                "CoalescentExponential.t:anolis", "ePopSize.t:anolis", "growthRate.t:anolis");

        // Bayesian skyline: piecewise population sizes with group sizes.
        warning("Change to Coalescent - BPS");
        beautiFrame.comboBox("TreeDistribution").selectItem("Coalescent Bayesian Skyline");
        printBeautiState(tabs);
        assertStateEquals("Tree.t:anolis", "bPopSizes.t:anolis", "bGroupSizes.t:anolis");
        assertOperatorsEqual("treeScaler.t:anolis", "treeRootScaler.t:anolis", "UniformOperator.t:anolis",
                "SubtreeSlide.t:anolis", "narrow.t:anolis", "wide.t:anolis", "WilsonBalding.t:anolis",
                "popSizesScaler.t:anolis", "groupSizesDelta.t:anolis");
        assertPriorsEqual("BayesianSkyline.t:anolis", "MarkovChainedPopSizes.t:anolis");
        assertTraceLogEqual("posterior", "likelihood", "prior", "treeLikelihood.anolis", "TreeHeight.t:anolis",
                "BayesianSkyline.t:anolis", "bPopSizes.t:anolis", "bGroupSizes.t:anolis");

        // Yule: a single birth-rate parameter.
        warning("Change to Yule");
        beautiFrame.comboBox("TreeDistribution").selectItem("Yule Model");
        printBeautiState(tabs);
        assertStateEquals("Tree.t:anolis", "birthRate.t:anolis");
        assertOperatorsEqual("treeScaler.t:anolis", "treeRootScaler.t:anolis", "UniformOperator.t:anolis",
                "SubtreeSlide.t:anolis", "narrow.t:anolis", "wide.t:anolis", "WilsonBalding.t:anolis",
                "YuleBirthRateScaler.t:anolis");
        assertPriorsEqual("YuleModel.t:anolis", "YuleBirthRatePrior.t:anolis");
        assertTraceLogEqual("posterior", "likelihood", "prior", "treeLikelihood.anolis", "TreeHeight.t:anolis",
                "YuleModel.t:anolis", "birthRate.t:anolis");

        // Birth-death: birth rate plus relative death rate.
        warning("Change to Birth-Death");
        beautiFrame.comboBox("TreeDistribution").selectItem("Birth Death Model");
        printBeautiState(tabs);
        assertStateEquals("Tree.t:anolis", "birthRate2.t:anolis", "relativeDeathRate2.t:anolis");
        assertOperatorsEqual("treeScaler.t:anolis", "treeRootScaler.t:anolis", "UniformOperator.t:anolis",
                "SubtreeSlide.t:anolis", "narrow.t:anolis", "wide.t:anolis", "WilsonBalding.t:anolis",
                "BirthRateScaler.t:anolis", "DeathRateScaler.t:anolis");
        assertPriorsEqual("BirthDeath.t:anolis", "BirthRatePrior.t:anolis", "DeathRatePrior.t:anolis");
        assertTraceLogEqual("posterior", "likelihood", "prior", "treeLikelihood.anolis", "TreeHeight.t:anolis",
                "BirthDeath.t:anolis", "birthRate2.t:anolis", "relativeDeathRate2.t:anolis");

        makeSureXMLParses();
    }
}
package com.inari.commons; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import com.inari.commons.geom.Direction; import com.inari.commons.geom.Orientation; import org.junit.Test; import com.inari.commons.geom.Position; import com.inari.commons.geom.Rectangle; public class GeomUtilsTest { @Test public void testDistance() { Position p1 = new Position( 0, 0 ); Position p2 = new Position( 1, 0 ); assertEquals( "1.0", String.valueOf( GeomUtils.getDistance( p1, p2 ) ) ); assertTrue( 1.0f == GeomUtils.getDistance( p1, p2 ) ); p2 = new Position( 0, 1 ); assertEquals( "1.0", String.valueOf( GeomUtils.getDistance( p1, p2 ) ) ); assertTrue( 1.0f == GeomUtils.getDistance( p1, p2 ) ); p2 = new Position( 1, 1 ); assertEquals( "1.4142135", String.valueOf( GeomUtils.getDistance( p1, p2 ) ) ); assertTrue( 1.4142135f == GeomUtils.getDistance( p1, p2 ) ); p2 = new Position( 2, 1 ); assertEquals( "2.236068", String.valueOf( GeomUtils.getDistance( p1, p2 ) ) ); assertTrue( 2.236068f == GeomUtils.getDistance( p1, p2 ) ); p2 = new Position( 2, 2 ); assertEquals( "2.828427", String.valueOf( GeomUtils.getDistance( p1, p2 ) ) ); assertTrue( 2.828427f == GeomUtils.getDistance( p1, p2 ) ); p2 = new Position( 4, 2 ); assertEquals( "4.472136", String.valueOf( GeomUtils.getDistance( p1, p2 ) ) ); assertTrue( 4.472136f == GeomUtils.getDistance( p1, p2 ) ); } @Test public void testIntersect() { Rectangle r1 = new Rectangle( 0, 0, 100, 100 ); Rectangle r2 = new Rectangle( 0, 0, 100, 100 ); assertTrue( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 100, 0, 0, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 101, 0, 0, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 0, 100, 0, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 0, 101, 0, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 0, 0, 0, 0 ); assertFalse( 
GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 0, -1, 0, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 0, -1, 0, 1 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( -1, 0, 0, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( -1, 0, 1, 0 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); r2 = new Rectangle( 100, 100, 2, 2 ); assertFalse( GeomUtils.intersect( r1, r2 ) ); assertFalse( GeomUtils.intersect( r2, r1 ) ); } @Test public void testGetIntersectionCode() { Rectangle r1 = new Rectangle( 0, 0, 100, 100 ); Rectangle r2 = new Rectangle( 50, 50, 100, 100 ); assertEquals( "17", String.valueOf( GeomUtils.getIntersectionCode( r1, r2 ) ) ); assertEquals( "12", String.valueOf( GeomUtils.getIntersectionCode( r2, r1 ) ) ); r2 = new Rectangle( 100, 100, 100, 100 ); assertEquals( "0", String.valueOf( GeomUtils.getIntersectionCode( r1, r2 ) ) ); r2 = new Rectangle( 99, 99, 100, 100 ); assertEquals( "17", String.valueOf( GeomUtils.getIntersectionCode( r1, r2 ) ) ); r2 = new Rectangle( 100, 99, 100, 100 ); assertEquals( "0", String.valueOf( GeomUtils.getIntersectionCode( r1, r2 ) ) ); r2 = new Rectangle( 50, 50, 100, 10 ); assertEquals( "25", String.valueOf( GeomUtils.getIntersectionCode( r1, r2 ) ) ); assertEquals( "4", String.valueOf( GeomUtils.getIntersectionCode( r2, r1 ) ) ); } @Test public void testIntersection() { Rectangle r1 = new Rectangle( 0, 0, 100, 100 ); Rectangle r2 = new Rectangle( 50, 50, 100, 100 ); Rectangle intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=50,y=50,width=50,height=50]", intersection.toString() ); intersection = GeomUtils.intersection( r2, r1 ); assertEquals( "[x=50,y=50,width=50,height=50]", intersection.toString() ); assertTrue( intersection.area() > 0 ); r2 = new Rectangle( 100, 100, 100, 100 ); intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=100,y=100,width=0,height=0]", intersection.toString() ); assertFalse( intersection.area() > 0 
); r2 = new Rectangle( 101, 100, 100, 100 ); intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=101,y=100,width=0,height=0]", intersection.toString() ); assertFalse( intersection.area() > 0 ); r2 = new Rectangle( 101, 101, 100, 100 ); intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=101,y=101,width=0,height=0]", intersection.toString() ); assertFalse( intersection.area() > 0 ); r2 = new Rectangle( 99, 101, 100, 100 ); intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=99,y=101,width=1,height=0]", intersection.toString() ); assertFalse( intersection.area() > 0 ); r2 = new Rectangle( 99, 99, 100, 100 ); intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=99,y=99,width=1,height=1]", intersection.toString() ); assertTrue( intersection.area() > 0 ); r2 = new Rectangle( -10, -10, 50, 50 ); intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=0,y=0,width=40,height=40]", intersection.toString() ); assertTrue( intersection.area() > 0 ); } @Test public void testIntersection2() { Rectangle r1 = new Rectangle( 0, 0, 8, 8 ); Rectangle r2 = new Rectangle( 0, 0, 8, 8 ); Rectangle intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=0,y=0,width=8,height=8]", intersection.toString() ); r2.x++; intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=1,y=0,width=7,height=8]", intersection.toString() ); r2.x = 7; intersection = GeomUtils.intersection( r1, r2 ); assertEquals( "[x=7,y=0,width=1,height=8]", intersection.toString() ); } @Test public void testIntersectionAA() { assertEquals( "0", String.valueOf( GeomUtils.intersection( 0, 10, 20, 10 ) ) ); assertEquals( "0", String.valueOf( GeomUtils.intersection( 100, 10, 20, 10 ) ) ); assertEquals( "10", String.valueOf( GeomUtils.intersection( 0, 100, 20, 10 ) ) ); assertEquals( "10", String.valueOf( GeomUtils.intersection( 20, 10, 0, 100 ) ) ); assertEquals( "20", String.valueOf( GeomUtils.intersection( 10, 100, 0, 30 ) ) ); 
assertEquals( "10", String.valueOf( GeomUtils.intersection( 0, 10, -10, 30 ) ) ); assertEquals( "5", String.valueOf( GeomUtils.intersection( 0, 10, -10, 15 ) ) ); } @Test public void testIntersectionWithResult() { Rectangle r1 = new Rectangle( 0,0,10,10 ); Rectangle r2 = new Rectangle( 10,0,10,10 ); Rectangle r3 = new Rectangle( 20,0,10,10 ); Rectangle intersection = new Rectangle(); GeomUtils.intersection( r1, r2, intersection ); assertEquals( "[x=10,y=0,width=0,height=10]", intersection.toString() ); GeomUtils.intersection( r2, r1, intersection ); assertEquals( "[x=10,y=0,width=0,height=10]", intersection.toString() ); GeomUtils.intersection( r2, r3, intersection ); assertEquals( "[x=20,y=0,width=0,height=10]", intersection.toString() ); } @Test public void testContains() { Rectangle r1 = new Rectangle( 1, 1, 10, 10 ); assertTrue( GeomUtils.contains( r1, 1, 1 ) ); assertTrue( GeomUtils.contains( r1, 5, 5 ) ); assertTrue( GeomUtils.contains( r1, 1, 10 ) ); assertTrue( GeomUtils.contains( r1, 10, 1 ) ); assertFalse( GeomUtils.contains( r1, 0, 0 ) ); assertFalse( GeomUtils.contains( r1, 0, 1 ) ); assertFalse( GeomUtils.contains( r1, 1, 0 ) ); assertFalse( GeomUtils.contains( r1, 1, 11 ) ); assertFalse( GeomUtils.contains( r1, 11, 1 ) ); } @Test public void movePositionTest() { Position p = new Position( 0, 0 ); assertEquals( "[x=0,y=0]", p.toString() ); GeomUtils.movePosition( p, Orientation.EAST ); assertEquals( "[x=1,y=0]", p.toString() ); GeomUtils.movePosition( p, Orientation.SOUTH ); assertEquals( "[x=1,y=1]", p.toString() ); GeomUtils.movePosition( p, Orientation.WEST ); assertEquals( "[x=0,y=1]", p.toString() ); GeomUtils.movePosition( p, Orientation.NORTH ); assertEquals( "[x=0,y=0]", p.toString() ); GeomUtils.movePosition( p, Orientation.EAST, 1 ); assertEquals( "[x=1,y=0]", p.toString() ); GeomUtils.movePosition( p, Orientation.SOUTH, 2 ); assertEquals( "[x=1,y=2]", p.toString() ); GeomUtils.movePosition( p, Orientation.WEST, 3 ); assertEquals( 
"[x=-2,y=2]", p.toString() ); GeomUtils.movePosition( p, Orientation.NORTH, 4 ); assertEquals( "[x=-2,y=-2]", p.toString() ); p.x = 0; p.y = 0; GeomUtils.movePosition( p, Orientation.EAST,1, false ); assertEquals( "[x=1,y=0]", p.toString() ); GeomUtils.movePosition( p, Orientation.SOUTH,1, false ); assertEquals( "[x=1,y=-1]", p.toString() ); GeomUtils.movePosition( p, Orientation.WEST,1, false ); assertEquals( "[x=0,y=-1]", p.toString() ); GeomUtils.movePosition( p, Orientation.NORTH,1, false ); assertEquals( "[x=0,y=0]", p.toString() ); GeomUtils.movePosition( p, Direction.NORTH_EAST, 1, true ); assertEquals( "[x=1,y=-1]", p.toString() ); } }
package it.unibz.inf.ontop.iq.optimizer.impl; import com.google.inject.Inject; import com.google.inject.Singleton; import it.unibz.inf.ontop.exception.MinorOntopInternalBugException; import it.unibz.inf.ontop.iq.IQ; import it.unibz.inf.ontop.iq.exception.EmptyQueryException; import it.unibz.inf.ontop.iq.IntermediateQuery; import it.unibz.inf.ontop.iq.optimizer.*; import it.unibz.inf.ontop.iq.tools.IQConverter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.UUID; @Singleton public class FixedPointJoinLikeOptimizer implements JoinLikeOptimizer { private static final Logger log = LoggerFactory.getLogger(FixedPointJoinLikeOptimizer.class); private static final int MAX_LOOP = 100; private final InnerJoinMutableOptimizer joinMutableOptimizer; private final LeftJoinMutableOptimizer leftJoinMutableOptimizer; private final InnerJoinIQOptimizer innerJoinIQOptimizer; private final LeftJoinIQOptimizer leftJoinIQOptimizer; private final IQConverter iqConverter; @Inject private FixedPointJoinLikeOptimizer(InnerJoinMutableOptimizer joinMutableOptimizer, LeftJoinMutableOptimizer leftJoinMutableOptimizer, InnerJoinIQOptimizer innerJoinIQOptimizer, LeftJoinIQOptimizer leftJoinIQOptimizer, IQConverter iqConverter){ this.joinMutableOptimizer = joinMutableOptimizer; this.leftJoinMutableOptimizer = leftJoinMutableOptimizer; this.innerJoinIQOptimizer = innerJoinIQOptimizer; this.leftJoinIQOptimizer = leftJoinIQOptimizer; this.iqConverter = iqConverter; } /** * Combines "mutable" optimizations and IQ optimizations */ @Override public IntermediateQuery optimize(IntermediateQuery query) throws EmptyQueryException { UUID conversionVersion = UUID.randomUUID(); boolean converged; do { UUID oldVersionNumber; do { oldVersionNumber = query.getVersionNumber(); query = leftJoinMutableOptimizer.optimize(query); log.debug("New query after left join mutable optimization: \n" + query.toString()); query = joinMutableOptimizer.optimize(query); log.debug("New query after join 
mutable optimization: \n" + query.toString()); } while (oldVersionNumber != query.getVersionNumber()); converged = (conversionVersion == query.getVersionNumber()); if (!converged) { IQ newIQ = optimizeIQ(iqConverter.convert(query)); query = iqConverter.convert(newIQ, query.getExecutorRegistry()); conversionVersion = query.getVersionNumber(); } } while (!converged); return query; } private IQ optimizeIQ(IQ initialIQ) { // Non-final IQ currentIQ = initialIQ; for (int i=0; i < MAX_LOOP; i++){ IQ optimizedIQ = leftJoinIQOptimizer.optimize(innerJoinIQOptimizer.optimize(currentIQ)) .normalizeForOptimization(); if (optimizedIQ.equals(currentIQ)) return optimizedIQ; else currentIQ = optimizedIQ; } throw new MinorOntopInternalBugException("MAX_LOOP reached"); } }
package com.thindeck.dynamo; import com.jcabi.dynamo.Attributes; import com.jcabi.dynamo.Region; import com.jcabi.dynamo.Table; import com.jcabi.dynamo.mock.H2Data; import com.jcabi.dynamo.mock.MkRegion; import com.thindeck.api.Repo; import com.thindeck.api.Repos; import java.io.IOException; import java.util.Iterator; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.Test; /** * Tests for {@link DyRepos}. * * @author Krzysztof Krason (Krzysztof.Krason@gmail.com) * @version $Id$ */ public final class DyReposTest { /** * DyRepos can get single repo by name. * @throws IOException In case of error. */ @Test public void getRepoByName() throws IOException { final String name = "repo_name"; final Repos repos = new DyRepos(this.region(name)); MatcherAssert.assertThat( repos.get(name).name(), Matchers.is(name) ); } /** * DyRepos throws exception on adding existing repo. * @throws IOException In case of error. */ @Test(expected = IllegalArgumentException.class) public void addExistingRepo() throws IOException { final String name = "existing_repo_name"; final Repos repos = new DyRepos(this.region(name)); repos.add(name); } /** * DyRepos add new repo. * @throws IOException In case of error. */ @Test public void addNewRepo() throws IOException { final String name = "new_repo_name"; final Repos repos = new DyRepos(this.region()); repos.add(name); MatcherAssert.assertThat( repos.get(name).name(), Matchers.is(name) ); } /** * DyRepos can return single repos. * @throws IOException In case of error. */ @Test public void iteratesOverEmptyRepoList() throws IOException { final Iterator<Repo> repos = new DyRepos(this.region()).iterate() .iterator(); MatcherAssert.assertThat(repos.hasNext(), Matchers.is(false)); } /** * DyRepos can return single repos. * @throws IOException In case of error. 
*/ @Test public void iteratesOverSingleRepo() throws IOException { final String name = "repo name"; final Iterator<Repo> repos = new DyRepos(this.region(name)).iterate() .iterator(); MatcherAssert.assertThat(repos.next().name(), Matchers.equalTo(name)); MatcherAssert.assertThat(repos.hasNext(), Matchers.is(false)); } /** * DyRepos can return multiple repos. * @throws IOException In case of error. */ @Test public void iteratesOverMultipleRepos() throws IOException { final String first = "first name"; final String second = "second name"; final Iterator<Repo> repos = new DyRepos(this.region(first, second)) .iterate().iterator(); MatcherAssert.assertThat(repos.next().name(), Matchers.equalTo(first)); MatcherAssert.assertThat(repos.next().name(), Matchers.equalTo(second)); MatcherAssert.assertThat(repos.hasNext(), Matchers.is(false)); } /** * Create region with repos. * @param names Names of the repos. * @return Region created. * @throws IOException In case of error. */ @SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops") private Region region(final String... names) throws IOException { final Region region = new MkRegion( new H2Data().with( DyRepo.TBL, new String[] {DyRepo.ATTR_NAME}, new String[] {DyRepo.ATTR_UPDATED} ) ); final Table table = region.table(DyRepo.TBL); for (final String name : names) { table.put( new Attributes().with(DyRepo.ATTR_NAME, name) .with(DyRepo.ATTR_UPDATED, System.currentTimeMillis()) ); } return region; } }
package org.csstudio.platform.internal.simpledal.dal;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

import org.csstudio.platform.internal.simpledal.AbstractConnector;
import org.csstudio.platform.internal.simpledal.converters.ConverterUtil;
import org.csstudio.platform.logging.CentralLogger;
import org.csstudio.platform.model.pvs.DALPropertyFactoriesProvider;
import org.csstudio.platform.model.pvs.IProcessVariableAddress;
import org.csstudio.platform.simpledal.ConnectionState;
import org.csstudio.platform.simpledal.IProcessVariableValueListener;
import org.csstudio.platform.simpledal.IProcessVariableWriteListener;
import org.csstudio.platform.simpledal.SettableState;
import org.csstudio.platform.simpledal.ValueType;
import org.epics.css.dal.CharacteristicInfo;
import org.epics.css.dal.DataExchangeException;
import org.epics.css.dal.DynamicValueCondition;
import org.epics.css.dal.DynamicValueEvent;
import org.epics.css.dal.DynamicValueListener;
import org.epics.css.dal.DynamicValueProperty;
import org.epics.css.dal.ResponseEvent;
import org.epics.css.dal.ResponseListener;
import org.epics.css.dal.Timestamp;
import org.epics.css.dal.context.ConnectionEvent;
import org.epics.css.dal.context.LinkListener;
import org.epics.css.dal.simple.RemoteInfo;
import org.epics.css.dal.spi.PropertyFactory;

/**
 * DAL Connectors are connected to the control system via the DAL API.
 *
 * All events received from DAL are forwarded to
 * {@link IProcessVariableValueListener}s which abstract from DAL.
 *
 * For convenience the {@link IProcessVariableValueListener}s are only weakly
 * referenced. The connector tracks for {@link IProcessVariableValueListener}s
 * that have been garbage collected and removes those references from its
 * internal list. This way {@link IProcessVariableValueListener}s don't have
 * to be disposed explicitly.
 *
 * @author Sven Wende
 */
@SuppressWarnings("unchecked")
public final class DalConnector extends AbstractConnector implements
		DynamicValueListener, LinkListener, ResponseListener,
		PropertyChangeListener {

	// Maximum time (in ms) to wait for the underlying DAL property to
	// connect before a read/write/settable-check gives up.
	private static final int CONNECTION_TIMEOUT = 3000;

	/**
	 * The DAL property, this connector is connected to.
	 */
	private DynamicValueProperty _dalProperty;

	/**
	 * Constructor.
	 *
	 * @param pvAddress the process variable address this connector serves
	 * @param valueType the value type values should be converted to
	 */
	public DalConnector(IProcessVariableAddress pvAddress, ValueType valueType) {
		super(pvAddress, valueType);
	}

	/**
	 * {@inheritDoc}
	 */
	public void propertyChange(PropertyChangeEvent evt) {
		// a property change event indicates a change in a characteristic value
		Object value = evt.getNewValue();
		String characteristicId = evt.getPropertyName();
		doForwardCharacteristic(value, new Timestamp(), characteristicId);
	}

	/**
	 * {@inheritDoc}
	 */
	public void conditionChange(DynamicValueEvent event) {
		// translate a condition change to certain characteristics listeners
		// might be registered for
		processConditionChange(event.getCondition(), event.getTimestamp());
	}

	@Override
	protected void sendInitialValuesForNewListener(String characteristicId,
			IProcessVariableValueListener listener) {
		super.sendInitialValuesForNewListener(characteristicId, listener);
		// Also push the current condition-derived characteristics (timestamp,
		// status, severity) to the newly registered listener.
		if (_dalProperty != null) {
			processConditionChange(_dalProperty.getCondition(), _dalProperty
					.getLatestValueUpdateTimestamp());
		}
	}

	/**
	 * Forwards the characteristics derived from a DAL condition (timestamp,
	 * status, severity) to registered listeners.
	 *
	 * @param condition the current DAL condition (may be null, then nothing is forwarded)
	 * @param timestamp the timestamp to forward the characteristics with
	 */
	private void processConditionChange(DynamicValueCondition condition,
			Timestamp timestamp) {
		if (condition != null) {
			// ... characteristic "timestamp"
			doForwardCharacteristic(condition.getTimestamp(), timestamp,
					CharacteristicInfo.C_TIMESTAMP_INFO.getName());

			// ... characteristic "status"
			doForwardCharacteristic(EpicsUtil.extratStatus(condition),
					timestamp, CharacteristicInfo.C_STATUS_INFO.getName());

			// ... characteristic "severity"
			doForwardCharacteristic(EpicsUtil.toEPICSFlavorSeverity(condition),
					timestamp, CharacteristicInfo.C_SEVERITY_INFO.getName());
		}
	}

	/**
	 * {@inheritDoc}
	 */
	public void errorResponse(DynamicValueEvent event) {
		// FIXME: forward condition changes
	}

	/**
	 * {@inheritDoc}
	 */
	public void timelagStarts(DynamicValueEvent event) {
		// FIXME: forward condition changes
	}

	/**
	 * {@inheritDoc}
	 */
	public void timelagStops(DynamicValueEvent event) {
		// FIXME: forward condition changes
	}

	/**
	 * {@inheritDoc}
	 */
	public void timeoutStarts(DynamicValueEvent event) {
		// FIXME: forward condition changes
	}

	/**
	 * {@inheritDoc}
	 */
	public void timeoutStops(DynamicValueEvent event) {
		// FIXME: forward condition changes
	}

	/**
	 * {@inheritDoc}
	 */
	public void valueChanged(final DynamicValueEvent event) {
		doHandleValueUpdate(event);
	}

	/**
	 * {@inheritDoc}
	 */
	public void valueUpdated(final DynamicValueEvent event) {
		doHandleValueUpdate(event);
	}

	/**
	 * {@inheritDoc}
	 */
	public void connected(final ConnectionEvent e) {
		// ... forward the new connection state
		doForwardConnectionStateChange(ConnectionState.translate(e.getState()));
		// ... forward initial values
		updateCharacteristicListeners();
	}

	/**
	 * {@inheritDoc}
	 */
	public void connectionFailed(ConnectionEvent e) {
		doForwardConnectionStateChange(ConnectionState.translate(e.getState()));
	}

	/**
	 * {@inheritDoc}
	 */
	public void connectionLost(ConnectionEvent e) {
		forwardConnectionEvent(e);
	}

	/**
	 * {@inheritDoc}
	 */
	public void destroyed(ConnectionEvent e) {
		forwardConnectionEvent(e);
	}

	/**
	 * {@inheritDoc}
	 */
	public void disconnected(ConnectionEvent e) {
		forwardConnectionEvent(e);
	}

	/**
	 * {@inheritDoc}
	 */
	public void resumed(ConnectionEvent e) {
		forwardConnectionEvent(e);
	}

	/**
	 * {@inheritDoc}
	 */
	public void suspended(ConnectionEvent e) {
		forwardConnectionEvent(e);
	}

	/**
	 * {@inheritDoc}
	 */
	public void responseError(ResponseEvent event) {
		// Forward the error message carried by the response (if any).
		Exception e = event.getResponse().getError();
		doForwardError(e != null ? e.getMessage() : "Unknown error!");
	}

	/**
	 * {@inheritDoc}
	 */
	public void responseReceived(ResponseEvent event) {
		// Igor: if necessary update last value. We expect one event only
		// originating
		// from initial asynchronous get
		doForwardValue(event.getResponse().getValue(), event.getResponse()
				.getTimestamp());
	}

	// Forwards a connection event's state to the connection-state listeners.
	private void forwardConnectionEvent(ConnectionEvent e) {
		doForwardConnectionStateChange(ConnectionState.translate(e.getState()));
	}

	/**
	 * Waits until DAL property is connected or timeout has elapsed
	 *
	 * @param timeout
	 *            the timeout to wait
	 *
	 * @return <code>true</code> if property was connected
	 */
	public boolean waitTillConnected(long timeout) {
		return EpicsUtil.waitTillConnected(_dalProperty, timeout);
	}

	/**
	 * {@inheritDoc}
	 */
	protected void doGetValueAsynchronously(
			final IProcessVariableValueListener listener) {
		if (waitTillConnected(CONNECTION_TIMEOUT)) {
			block();

			// One-shot listener: forwards either the converted value or the
			// error message of the asynchronous get to the caller's listener.
			ResponseListener responseListener = new ResponseListener() {
				public void responseError(ResponseEvent event) {
					// forward the error
					Exception error = event.getResponse().getError();
					String errorMsg = error != null ? error.getMessage()
							: "Unknown Error!";
					listener.errorOccured(errorMsg);
					printDebugInfo("AGET-ERROR : " + error + " ("
							+ event.getResponse().toString() + ")");
				}

				public void responseReceived(ResponseEvent event) {
					Object value = event.getResponse().getValue();
					Timestamp timestamp = event.getResponse().getTimestamp();
					listener.valueChanged(ConverterUtil.convert(value,
							getValueType()), timestamp);
					printDebugInfo("AGET-RETURN: " + getValueType() + " "
							+ value);
				}
			};

			printDebugInfo("GET ASYNC");

			try {
				_dalProperty.getAsynchronous(responseListener);
			} catch (Exception e) {
				listener.errorOccured(e.getLocalizedMessage());
			}
		} else {
			listener.errorOccured("Internal error. No connection available.");
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected Object doGetValueSynchronously() throws Exception {
		Object result = null;

		// ... try to read the value; returns null when the property does not
		// connect within the timeout
		if (waitTillConnected(CONNECTION_TIMEOUT)) {
			printDebugInfo("GET SYNC");
			result = _dalProperty.getValue();
		}

		return result;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void doSetValueAsynchronously(Object value,
			final IProcessVariableWriteListener listener) throws Exception {
		// NOTE(review): hard-coded 3000 duplicates CONNECTION_TIMEOUT
		if (waitTillConnected(3000)) {
			if (_dalProperty.isSettable()) {
				Object convertedValue;
				try {
					convertedValue = ConverterUtil.convert(value,
							getValueType());
					_dalProperty.setAsynchronous(convertedValue,
							new ResponseListener() {
								public void responseReceived(ResponseEvent event) {
									if (listener != null) {
										listener.success();
									}
									CentralLogger.getInstance().debug(null,
											event.getResponse().toString());
								}

								public void responseError(ResponseEvent event) {
									if (listener != null) {
										listener.error(event.getResponse()
												.getError());
									}
									CentralLogger.getInstance().error(null,
											event.getResponse().getError());
								}
							});
				} catch (NumberFormatException nfe) {
					// Do nothing! Is a invalid value format!
					// The write is silently skipped (only logged as warning).
					CentralLogger.getInstance().warn(
							this,
							"Invalid value format. (" + value
									+ ") is not set to " + getName());
					return;
				}
			} else {
				throw new Exception("Property "
						+ _dalProperty.getUniqueName() + " is not settable");
			}
		} else {
			throw new Exception("Property not available");
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected boolean doSetValueSynchronously(Object value) {
		boolean success = false;

		if (waitTillConnected(CONNECTION_TIMEOUT)) {
			if (_dalProperty.isSettable()) {
				try {
					_dalProperty.setValue(ConverterUtil.convert(value,
							getValueType()));
					success = true;
				} catch (NumberFormatException nfe) {
					// Conversion of the value to the target type failed.
					CentralLogger.getInstance().warn(
							this,
							"Invalid value format. (" + value
									+ ") is not set to" + getName());
				} catch (DataExchangeException e) {
					CentralLogger.getInstance().error(null, e);
				}
			} else {
				printDebugInfo("Property not settable");
			}
		} else {
			printDebugInfo("Property not available");
		}

		return success;
	}

	/**
	 * {@inheritDoc}
	 */
	protected void doInit() {
		// get or create a real DAL property
		DynamicValueProperty property = null;

		try {
			org.epics.css.dal.context.RemoteInfo oldRi = getProcessVariableAddress()
					.toDalRemoteInfo();
			RemoteInfo ri = new RemoteInfo(oldRi.getPlugType(), oldRi
					.getName(), null, null);

			PropertyFactory factory = DALPropertyFactoriesProvider
					.getInstance().getPropertyFactory(
							getProcessVariableAddress().getControlSystem());

			switch (getValueType()) {
			case OBJECT:
				property = factory.getProperty(ri);
				break;
			case STRING:
				/*
				 * swende: 2010-03-06: this is a dirty quickfix which is related
				 * to problems with SDS displays that specifiy
				 * "pv[severity], String" as pv address / please remove if it
				 * does not work as expected or when all current SDS files at
				 * DESY have been propertly changed
				 */
				String characteristic = getProcessVariableAddress()
						.getCharacteristic();
				// If connection is made as pv[severity] or just pv, than ignore
				// everything and go to default. In all other cases (e.g.
				// pv[graphMin}, string), create a default property.
				// (Intentional fall-through to "default" when the condition
				// below does not hold.)
				if (characteristic != null
						&& !CharacteristicInfo.C_SEVERITY_INFO.getName()
								.equals(characteristic)) {
					property = factory.getProperty(ri);
					break;
				}
			default:
				property = factory.getProperty(ri, getValueType().getDalType(),
						null);
				break;
			}

			if (property != null) {
				setDalProperty(property);
			}
		} catch (Throwable e) {
			forwardError(e.getLocalizedMessage());
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void doDispose() {
		printDebugInfo("DISPOSE");
		DynamicValueProperty property = _dalProperty;

		// Detach this connector first, then clean up the DAL property.
		setDalProperty(null);

		if (property != null && !property.isDestroyed()) {
			// remove link listener
			property.removeLinkListener(this);

			// remove value listeners
			property.removeDynamicValueListener(this);

			// remove response listeners
			property.removeResponseListener(this);

			// try to dispose the DAL property
			PropertyFactory factory = DALPropertyFactoriesProvider
					.getInstance().getPropertyFactory(
							getProcessVariableAddress().getControlSystem());

			// if the property is not used anymore by other connectors,
			// destroy it
			if (property.getDynamicValueListeners().length <= 1
					&& property.getResponseListeners().length <= 0) {
				printDebugInfo("DESTROY");
				factory.getPropertyFamily().destroy(property);

				// DAL caches a reference to a former ResponseListener
				// via its latestResponse and latestRequest fields on
				// DynamicValuePropertyImpl.class
				/*
				 * try { Object e = property.getLatestResponse();
				 *
				 * property.getAsynchronous(null);
				 *
				 * while (e == property.getLatestResponse()) { Thread.sleep(1);
				 * } } catch (DataExchangeException e) { e.printStackTrace(); }
				 * catch (InterruptedException e) { e.printStackTrace(); }
				 */

				assert !factory.getPropertyFamily().contains(property) : "!getPropertyFactory().getPropertyFamily().contains(property)";
			}
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected SettableState doIsSettable() {
		SettableState result = SettableState.UNKNOWN;

		try {
			// DAL encapsulates the detection of the current user internally
			// (probably via global system properties)
			if (waitTillConnected(CONNECTION_TIMEOUT)) {
				result = _dalProperty.isSettable() ? SettableState.SETTABLE
						: SettableState.NOT_SETTABLE;
			}
		} catch (Exception e) {
			CentralLogger.getInstance().error(
					this,
					"We could not check the settable-state of ["
							+ getProcessVariableAddress().toString() + "]", e);
			result = SettableState.UNKNOWN;
		}

		return result;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void doGetCharacteristicAsynchronously(
			final String characteristicId, final ValueType valueType,
			final IProcessVariableValueListener listener) {
		try {
			if (waitTillConnected(CONNECTION_TIMEOUT)) {
				// One-shot listener forwarding the characteristic value (raw,
				// i.e. without ValueType conversion) or the error message.
				ResponseListener responseListener = new ResponseListener() {
					public void responseError(ResponseEvent event) {
						// forward the error
						Exception error = event.getResponse().getError();
						String errorMsg = error != null ? error.getMessage()
								: "Unknown Error!";
						listener.errorOccured(errorMsg);

						printDebugInfo("AGET-ERROR [" + characteristicId
								+ "] : " + error + " ("
								+ event.getResponse().toString() + ")");
					}

					public void responseReceived(ResponseEvent event) {
						Object value = event.getResponse().getValue();
						Timestamp timestamp = event.getResponse()
								.getTimestamp();
						listener.valueChanged(value, timestamp);
						// listener.valueChanged(ConverterUtil.convert(value,
						// valueType), timestamp);
						printDebugInfo("AGET-RETURN: " + valueType + " "
								+ value);
					}
				};

				printDebugInfo("GET ASYNC [" + characteristicId + "]");

				_dalProperty.getCharacteristicAsynchronously(characteristicId,
						responseListener);
			} else {
				listener.errorOccured("Internal error. No connection available.");
			}
		} catch (Exception e) {
			listener.errorOccured(e.getLocalizedMessage());
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected Object doGetCharacteristicSynchronously(String characteristicId,
			ValueType valueType) throws Exception {
		Object result = null;

		// ... try to read the value; severity/status/timestamp are derived
		// from the property's condition, everything else is fetched as a
		// plain characteristic (converted when a ValueType is given)
		if (waitTillConnected(CONNECTION_TIMEOUT)) {
			if (characteristicId.equals(CharacteristicInfo.C_SEVERITY_INFO
					.getName())) {
				result = EpicsUtil.toEPICSFlavorSeverity(_dalProperty
						.getCondition());
			} else if (characteristicId
					.equals(CharacteristicInfo.C_STATUS_INFO.getName())) {
				result = EpicsUtil.extratStatus(_dalProperty.getCondition());
			} else if (characteristicId
					.equals(CharacteristicInfo.C_TIMESTAMP_INFO.getName())) {
				result = _dalProperty.getCondition().getTimestamp();
			} else {
				Object tmp = _dalProperty.getCharacteristic(characteristicId);
				result = valueType != null ? ConverterUtil.convert(tmp,
						valueType) : tmp;
			}
		}

		return result;
	}

	/**
	 * Returns the DAL property that is internally used.
	 *
	 * @return the internally used DAL property
	 */
	protected DynamicValueProperty getDalProperty() {
		return _dalProperty;
	}

	/**
	 * Sets the DAL property, this connector is connected to.
	 *
	 * Deregisters this connector from the previous property (if any) and
	 * registers it on the new one; also forwards the new property's current
	 * connection state.
	 *
	 * @param dalProperty
	 *            the DAL property
	 */
	private void setDalProperty(DynamicValueProperty dalProperty) {
		if (_dalProperty != null) {
			_dalProperty.removeDynamicValueListener(this);
			_dalProperty.removePropertyChangeListener(this);
			_dalProperty.removeLinkListener(this);
		}

		_dalProperty = dalProperty;

		if (_dalProperty != null) {
			_dalProperty.addDynamicValueListener(this);
			_dalProperty.addPropertyChangeListener(this);

			// we add a LinkListener to get informed of connection state changes
			_dalProperty.addLinkListener(this);

			// send initial connection state
			forwardConnectionState(ConnectionState.translate(_dalProperty
					.getConnectionState()));
		}
	}

	/**
	 * A change of the "normal" value has been reported and needs to be
	 * forwarded.
	 *
	 * @param event
	 *            the event that reports the value update
	 */
	private void doHandleValueUpdate(DynamicValueEvent event) {
		// ... forward the value
		doForwardValue(event.getValue(), event.getTimestamp());

		// ... forward an additional "timestamp" characteristic
		doForwardCharacteristic(event.getTimestamp(), event.getTimestamp(),
				CharacteristicInfo.C_TIMESTAMP_INFO.getName());
	}
}
package com.exedio.cope; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.sql.Blob; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Savepoint; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import bak.pcj.list.IntArrayList; import com.exedio.dsmf.Driver; import com.exedio.dsmf.SQLRuntimeException; import com.exedio.dsmf.Schema; final class Database // TODO SOON make methods non-final { private static final String NO_SUCH_ROW = "no such row"; private final ArrayList<Table> tables = new ArrayList<Table>(); private final HashMap<String, UniqueConstraint> uniqueConstraintsByID = new HashMap<String, UniqueConstraint>(); private boolean buildStage = true; final Driver driver; final DialectParameters dialectParameters; final Dialect dialect; private final boolean migrationSupported; final boolean prepare; private final boolean log; private final boolean logStatementInfo; private final boolean butterflyPkSource; private final boolean fulltextIndex; final ConnectionPool connectionPool; private final java.util.Properties forcedNames; final java.util.Properties tableOptions; final Dialect.LimitSupport limitSupport; final long blobLengthFactor; final boolean supportsReadCommitted; final boolean supportsGetBytes; final boolean supportsBlobInResultSet; final boolean needsSavepoint; final boolean oracle; // TODO remove Database(final Driver driver, final DialectParameters dialectParameters, final Dialect dialect, final boolean migrationSupported) { final Properties properties = dialectParameters.properties; this.driver = driver; this.dialectParameters = dialectParameters; this.dialect = dialect; this.migrationSupported = 
migrationSupported; this.prepare = !properties.getDatabaseDontSupportPreparedStatements(); this.log = properties.getDatabaseLog(); this.logStatementInfo = properties.getDatabaseLogStatementInfo(); this.butterflyPkSource = properties.getPkSourceButterfly(); this.fulltextIndex = properties.getFulltextIndex(); this.connectionPool = new ConnectionPool( new CopeConnectionFactory(properties), properties.getConnectionPoolIdleLimit(), properties.getConnectionPoolIdleInitial()); this.forcedNames = properties.getDatabaseForcedNames(); this.tableOptions = properties.getDatabaseTableOptions(); this.limitSupport = properties.getDatabaseDontSupportLimit() ? Dialect.LimitSupport.NONE : dialect.getLimitSupport(); this.blobLengthFactor = dialect.getBlobLengthFactor(); this.oracle = getClass().getName().equals("com.exedio.cope.OracleDatabase"); //System.out.println("using database "+getClass()); assert limitSupport!=null; this.supportsReadCommitted = !dialect.fakesSupportReadCommitted() && dialectParameters.supportsTransactionIsolationLevel; this.supportsGetBytes = dialect.supportsGetBytes(); this.supportsBlobInResultSet = dialect.supportsBlobInResultSet(); this.needsSavepoint = dialect.needsSavepoint(); } final Driver getDriver() { return driver; } final java.util.Properties getTableOptions() { return tableOptions; } final ConnectionPool getConnectionPool() { return connectionPool; } final void addTable(final Table table) { if(!buildStage) throw new RuntimeException(); tables.add(table); } final void addUniqueConstraint(final String constraintID, final UniqueConstraint constraint) { if(!buildStage) throw new RuntimeException(); final Object collision = uniqueConstraintsByID.put(constraintID, constraint); if(collision!=null) throw new RuntimeException("ambiguous unique constraint "+constraint+" trimmed to >"+constraintID+"< colliding with "+collision); } protected final Statement createStatement() { return createStatement(true); } protected final Statement createStatement(final 
boolean qualifyTable) { return new Statement(this, qualifyTable); } protected final Statement createStatement(final Query<? extends Object> query) { return new Statement(this, query); } final void createDatabase(final int migrationVersion) { buildStage = false; makeSchema().create(); if(migrationSupported) { final ConnectionPool connectionPool = this.connectionPool; Connection con = null; try { con = connectionPool.getConnection(true); notifyMigration(con, migrationVersion, new Date(), "created schema", false); } catch(SQLException e) { throw new SQLRuntimeException(e, "migrate"); } finally { if(con!=null) { try { connectionPool.putConnection(con); con = null; } catch(SQLException ex) { throw new SQLRuntimeException(ex, "close"); } } } } } final void createDatabaseConstraints(final int mask) { buildStage = false; makeSchema().createConstraints(mask); } //private static int checkTableTime = 0; final void checkDatabase(final Connection connection) { buildStage = false; //final long time = System.currentTimeMillis(); // IMPLEMENTATION NOTE // MySQL can have at most 63 joined tables in one statement // and other databases probably have similar constraints as // well, so we limit the number of joined table here. final int CHUNK_LENGTH = 60; final int tablesSize = tables.size(); for(int chunkFromIndex = 0; chunkFromIndex<tablesSize; chunkFromIndex+=CHUNK_LENGTH) { final int chunkToIndex = Math.min(chunkFromIndex+CHUNK_LENGTH, tablesSize); final List<Table> tableChunk = tables.subList(chunkFromIndex, chunkToIndex); final Statement bf = createStatement(true); bf.append("select count(*) from ").defineColumnInteger(); boolean first = true; for(final Table table : tableChunk) { if(first) first = false; else bf.append(','); bf.append(table.protectedID); } bf.append(" where "); first = true; for(final Table table : tableChunk) { if(first) first = false; else bf.append(" and "); final Column primaryKey = table.primaryKey; bf.append(primaryKey). append('='). 
appendParameter(Type.NOT_A_PK); for(final Column column : table.getColumns()) { bf.append(" and "). append(column); if(column instanceof BlobColumn || (oracle && column instanceof StringColumn && ((StringColumn)column).maximumLength>=4000)) { bf.append("is not null"); } else { bf.append('='). appendParameter(column, column.getCheckValue()); } } } executeSQLQuery(connection, bf, new ResultSetHandler() { public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new SQLException(NO_SUCH_ROW); } }, false, false ); } } final void dropDatabase() { buildStage = false; makeSchema().drop(); } final void dropDatabaseConstraints(final int mask) { buildStage = false; makeSchema().dropConstraints(mask); } final void tearDownDatabase() { buildStage = false; makeSchema().tearDown(); } final void tearDownDatabaseConstraints(final int mask) { buildStage = false; makeSchema().tearDownConstraints(mask); } final void checkEmptyDatabase(final Connection connection) { buildStage = false; //final long time = System.currentTimeMillis(); for(final Table table : tables) { final int count = countTable(connection, table); if(count>0) throw new RuntimeException("there are "+count+" items left for table "+table.id); } //final long amount = (System.currentTimeMillis()-time); //checkEmptyTableTime += amount; //System.out.println("CHECK EMPTY TABLES "+amount+"ms accumulated "+checkEmptyTableTime); } final ArrayList<Object> search(final Connection connection, final Query<? 
extends Object> query, final boolean doCountOnly) { buildStage = false; listener.search(connection, query, doCountOnly); final int limitStart = query.limitStart; final int limitCount = query.limitCount; final boolean limitActive = limitStart>0 || limitCount!=Query.UNLIMITED_COUNT; final boolean distinct = query.distinct; final ArrayList<Join> queryJoins = query.joins; final Statement bf = createStatement(query); if(!doCountOnly && limitActive && limitSupport==Dialect.LimitSupport.CLAUSES_AROUND) dialect.appendLimitClause(bf, limitStart, limitCount); bf.append("select"); if(!doCountOnly && limitActive && limitSupport==Dialect.LimitSupport.CLAUSE_AFTER_SELECT) dialect.appendLimitClause(bf, limitStart, limitCount); bf.append(' '); final Selectable[] selects = query.selects; final Column[] selectColumns = new Column[selects.length]; final Type[] selectTypes = new Type[selects.length]; if(!distinct&&doCountOnly) { bf.append("count(*)"); } else { if(doCountOnly) bf.append("count("); if(distinct) bf.append("distinct "); for(int selectIndex = 0; selectIndex<selects.length; selectIndex++) { final Selectable select = selects[selectIndex]; final Column selectColumn; final Type selectType = select.getType(); final Table selectTable; final Column selectPrimaryKey; if(selectIndex>0) bf.append(','); if(select instanceof Aggregate) { bf.append(select, null).defineColumn(select); final Function selectSource = ((Aggregate)select).getSource(); if(selectSource instanceof FunctionField) { selectColumn = ((FunctionField)selectSource).getColumn(); } else if(selectSource instanceof Type.This) { selectTable = selectType.getTable(); selectPrimaryKey = selectTable.primaryKey; selectColumn = selectPrimaryKey; if(selectColumn.primaryKey) { final StringColumn selectTypeColumn = selectColumn.getTypeColumn(); if(selectTypeColumn==null) selectTypes[selectIndex] = selectType.getOnlyPossibleTypeOfInstances(); } else selectTypes[selectIndex] = selectType.getOnlyPossibleTypeOfInstances(); } else { 
selectColumn = null; final View view = (View)selectSource; bf.append(view, (Join)null).defineColumn(view); } } else if(select instanceof FunctionField) { selectColumn = ((FunctionField)select).getColumn(); bf.append(select, (Join)null).defineColumn(select); if(select instanceof ItemField) { final StringColumn typeColumn = ((ItemField)select).getTypeColumn(); if(typeColumn!=null) bf.append(',').append(typeColumn).defineColumn(typeColumn); } } else if(select instanceof Type.This) { selectTable = selectType.getTable(); selectPrimaryKey = selectTable.primaryKey; selectColumn = selectPrimaryKey; bf.appendPK(selectType, (Join)null).defineColumn(select); if(selectColumn.primaryKey) { final StringColumn selectTypeColumn = selectColumn.getTypeColumn(); if(selectTypeColumn!=null) { bf.append(','). append(selectTypeColumn).defineColumn(selectTypeColumn); } else selectTypes[selectIndex] = selectType.getOnlyPossibleTypeOfInstances(); } else selectTypes[selectIndex] = selectType.getOnlyPossibleTypeOfInstances(); } else { selectColumn = null; final View view = (View)select; bf.append(view, (Join)null).defineColumn(view); } selectColumns[selectIndex] = selectColumn; } if(doCountOnly) bf.append(')'); } bf.append(" from "). appendTypeDefinition((Join)null, query.type); if(queryJoins!=null) { for(final Join join : queryJoins) { final Condition joinCondition = join.condition; if(joinCondition==null) { if(join.kind!=Join.Kind.INNER) throw new RuntimeException("outer join must have join condition"); bf.append(" cross join "); } else { bf.append(' '). 
append(join.kind.sql); } bf.appendTypeDefinition(join, join.type); if(joinCondition!=null) { bf.append(" on "); joinCondition.append(bf); } } } if(query.condition!=null) { bf.append(" where "); query.condition.append(bf); } if(!doCountOnly) { final Function[] orderBy = query.orderBy; if(orderBy!=null) { final boolean[] orderAscending = query.orderAscending; for(int i = 0; i<orderBy.length; i++) { if(i==0) bf.append(" order by "); else bf.append(','); if(orderBy[i] instanceof ItemField) { final ItemField<? extends Item> itemOrderBy = (ItemField<? extends Item>)orderBy[i]; itemOrderBy.getValueType().getPkSource().appendOrderByExpression(bf, itemOrderBy); } else if(orderBy[i] instanceof Type.This) { final Type.This<? extends Item> itemOrderBy = (Type.This<? extends Item>)orderBy[i]; itemOrderBy.type.getPkSource().appendOrderByExpression(bf, itemOrderBy); } else bf.append(orderBy[i], (Join)null); if(!orderAscending[i]) bf.append(" desc"); // TODO break here, if already ordered by some unique function } } if(limitActive && limitSupport==Dialect.LimitSupport.CLAUSE_AFTER_WHERE) dialect.appendLimitClause(bf, limitStart, limitCount); } if(!doCountOnly && limitActive && limitSupport==Dialect.LimitSupport.CLAUSES_AROUND) dialect.appendLimitClause2(bf, limitStart, limitCount); final Type[] types = selectTypes; final Model model = query.model; final ArrayList<Object> result = new ArrayList<Object>(); if(limitStart<0) throw new RuntimeException(); if(selects.length!=selectColumns.length) throw new RuntimeException(); if(selects.length!=types.length) throw new RuntimeException(); //System.out.println(bf.toString()); query.addStatementInfo(executeSQLQuery(connection, bf, new ResultSetHandler() { public void handle(final ResultSet resultSet) throws SQLException { if(doCountOnly) { resultSet.next(); result.add(Integer.valueOf(resultSet.getInt(1))); if(resultSet.next()) throw new RuntimeException(); return; } if(limitStart>0 && limitSupport==Dialect.LimitSupport.NONE) { // TODO: 
ResultSet.relative // Would like to use // resultSet.relative(limitStart+1); // but this throws a java.sql.SQLException: // Invalid operation for forward only resultset : relative for(int i = limitStart; i>0; i resultSet.next(); } int i = ((limitCount==Query.UNLIMITED_COUNT||(limitSupport!=Dialect.LimitSupport.NONE)) ? Integer.MAX_VALUE : limitCount ); if(i<=0) throw new RuntimeException(String.valueOf(limitCount)); while(resultSet.next() && (--i)>=0) { int columnIndex = 1; final Object[] resultRow = (selects.length > 1) ? new Object[selects.length] : null; final Row dummyRow = new Row(); for(int selectIndex = 0; selectIndex<selects.length; selectIndex++) { final Selectable select; { Selectable select0 = selects[selectIndex]; if(select0 instanceof Aggregate) select0 = ((Aggregate)select0).getSource(); select = select0; } final Object resultCell; if(select instanceof FunctionField) { selectColumns[selectIndex].load(resultSet, columnIndex++, dummyRow); final FunctionField selectField = (FunctionField)select; if(select instanceof ItemField) { final StringColumn typeColumn = ((ItemField)selectField).getTypeColumn(); if(typeColumn!=null) typeColumn.load(resultSet, columnIndex++, dummyRow); } resultCell = selectField.get(dummyRow); } else if(select instanceof View) { final View selectFunction = (View)select; resultCell = selectFunction.load(resultSet, columnIndex++); } else { final Number pk = (Number)resultSet.getObject(columnIndex++); //System.out.println("pk:"+pk); if(pk==null) { // can happen when using right outer joins resultCell = null; } else { final Type type = types[selectIndex]; final Type currentType; if(type==null) { final String typeID = resultSet.getString(columnIndex++); currentType = model.findTypeByID(typeID); if(currentType==null) throw new RuntimeException("no type with type id "+typeID); } else currentType = type; resultCell = currentType.getItemObject(pk.intValue()); } } if(resultRow!=null) resultRow[selectIndex] = resultCell; else 
result.add(resultCell); } if(resultRow!=null) result.add(Collections.unmodifiableList(Arrays.asList(resultRow))); } } }, query.makeStatementInfo, false)); return result; } private final void log(final long start, final long end, final Statement statement) { final SimpleDateFormat df = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss.SSS"); System.out.println(df.format(new Date(start)) + " " + (end-start) + "ms: " + statement.getText()+" "+statement.parameters); } final void load(final Connection connection, final PersistentState state) { buildStage = false; listener.load(connection, state); final Statement bf = createStatement(state.type.supertype!=null); bf.append("select "); boolean first = true; for(Type type = state.type; type!=null; type = type.supertype) { for(final Column column : type.getTable().getColumns()) { if(!(column instanceof BlobColumn)) { if(first) first = false; else bf.append(','); bf.append(column).defineColumn(column); } } } if(first) { // no columns in type bf.appendPK(state.type, (Join)null); } bf.append(" from "); first = true; for(Type type = state.type; type!=null; type = type.supertype) { if(first) first = false; else bf.append(','); bf.append(type.getTable().protectedID); } bf.append(" where "); first = true; for(Type type = state.type; type!=null; type = type.supertype) { if(first) first = false; else bf.append(" and "); bf.appendPK(type, (Join)null). append('='). appendParameter(state.pk). appendTypeCheck(type.getTable(), state.type); // Here this also checks additionally for Model#findByID, that the item has the type given in the ID. 
} //System.out.println(bf.toString()); executeSQLQuery(connection, bf, state, false, false); } final void store( final Connection connection, final State state, final boolean present, final Map<BlobColumn, byte[]> blobs) { store(connection, state, present, blobs, state.type); } private final void store( final Connection connection, final State state, final boolean present, final Map<BlobColumn, byte[]> blobs, final Type<?> type) { buildStage = false; final Type supertype = type.supertype; if(supertype!=null) store(connection, state, present, blobs, supertype); final Table table = type.getTable(); final List<Column> columns = table.getColumns(); final Statement bf = createStatement(); final StringColumn typeColumn = table.typeColumn; if(present) { bf.append("update "). append(table.protectedID). append(" set "); boolean first = true; for(final Column column : columns) { if(!(column instanceof BlobColumn) || blobs.containsKey(column)) { if(first) first = false; else bf.append(','); bf.append(column.protectedID). append('='); if(column instanceof BlobColumn) bf.appendParameterBlob((BlobColumn)column, blobs.get(column)); else bf.appendParameter(column, state.store(column)); } } if(first) // no columns in table return; bf.append(" where "). append(table.primaryKey.protectedID). append('='). appendParameter(state.pk). appendTypeCheck(table, state.type); } else { bf.append("insert into "). append(table.protectedID). append("("). append(table.primaryKey.protectedID); if(typeColumn!=null) { bf.append(','). append(typeColumn.protectedID); } for(final Column column : columns) { if(!(column instanceof BlobColumn) || blobs.containsKey(column)) { bf.append(','). append(column.protectedID); } } bf.append(")values("). appendParameter(state.pk); if(typeColumn!=null) { bf.append(','). appendParameter(state.type.id); } for(final Column column : columns) { if(column instanceof BlobColumn) { if(blobs.containsKey(column)) { bf.append(','). 
appendParameterBlob((BlobColumn)column, blobs.get(column)); } } else { bf.append(','). appendParameter(column, state.store(column)); } } bf.append(')'); } //System.out.println("storing "+bf.toString()); executeSQLUpdate(connection, bf, 1, type.declaredUniqueConstraints); } final void delete(final Connection connection, final Item item) { buildStage = false; final Type type = item.type; final int pk = item.pk; for(Type currentType = type; currentType!=null; currentType = currentType.supertype) { final Table currentTable = currentType.getTable(); final Statement bf = createStatement(); bf.append("delete from "). append(currentTable.protectedID). append(" where "). append(currentTable.primaryKey.protectedID). append('='). appendParameter(pk); //System.out.println("deleting "+bf.toString()); executeSQLUpdate(connection, bf, 1); } } final byte[] load(final Connection connection, final BlobColumn column, final Item item) { // TODO reuse code in load blob methods buildStage = false; final Table table = column.table; final Statement bf = createStatement(); bf.append("select "). append(column.protectedID).defineColumn(column). append(" from "). append(table.protectedID). append(" where "). append(table.primaryKey.protectedID). append('='). appendParameter(item.pk). appendTypeCheck(table, item.type); final LoadBlobResultSetHandler handler = new LoadBlobResultSetHandler(supportsGetBytes); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } private static class LoadBlobResultSetHandler implements ResultSetHandler { final boolean supportsGetBytes; LoadBlobResultSetHandler(final boolean supportsGetBytes) { this.supportsGetBytes = supportsGetBytes; } byte[] result; public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new SQLException(NO_SUCH_ROW); result = supportsGetBytes ? 
resultSet.getBytes(1) : loadBlob(resultSet.getBlob(1)); } private static final byte[] loadBlob(final Blob blob) throws SQLException { if(blob==null) return null; return DataField.copy(blob.getBinaryStream(), blob.length()); } } final void load(final Connection connection, final BlobColumn column, final Item item, final OutputStream data, final DataField field) { buildStage = false; final Table table = column.table; final Statement bf = createStatement(); bf.append("select "). append(column.protectedID).defineColumn(column). append(" from "). append(table.protectedID). append(" where "). append(table.primaryKey.protectedID). append('='). appendParameter(item.pk). appendTypeCheck(table, item.type); executeSQLQuery(connection, bf, new ResultSetHandler(){ public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new SQLException(NO_SUCH_ROW); if(supportsBlobInResultSet) { final Blob blob = resultSet.getBlob(1); if(blob!=null) { InputStream source = null; try { source = blob.getBinaryStream(); field.copy(source, data, blob.length(), item); } catch(IOException e) { throw new RuntimeException(e); } finally { if(source!=null) { try { source.close(); } catch(IOException e) {/*IGNORE*/} } } } } else { InputStream source = null; try { source = resultSet.getBinaryStream(1); if(source!=null) field.copy(source, data, item); } catch(IOException e) { throw new RuntimeException(e); } finally { if(source!=null) { try { source.close(); } catch(IOException e) {/*IGNORE*/} } } } } }, false, false); } final long loadLength(final Connection connection, final BlobColumn column, final Item item) { buildStage = false; final Table table = column.table; final Statement bf = createStatement(); bf.append("select length("). append(column.protectedID).defineColumnInteger(). append(") from "). append(table.protectedID). append(" where "). append(table.primaryKey.protectedID). append('='). appendParameter(item.pk). 
appendTypeCheck(table, item.type); final LoadBlobLengthResultSetHandler handler = new LoadBlobLengthResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } private final class LoadBlobLengthResultSetHandler implements ResultSetHandler { long result; public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new SQLException(NO_SUCH_ROW); final Object o = resultSet.getObject(1); if(o!=null) { long value = ((Number)o).longValue(); final long factor = blobLengthFactor; if(factor!=1) { if(value%factor!=0) throw new RuntimeException("not dividable "+value+'/'+factor); value /= factor; } result = value; } else result = -1; } } final void store( final Connection connection, final BlobColumn column, final Item item, final InputStream data, final DataField field) throws IOException { buildStage = false; final Table table = column.table; final Statement bf = createStatement(); bf.append("update "). append(table.protectedID). append(" set "). append(column.protectedID). append('='); if(data!=null) bf.appendParameterBlob(column, data, field, item); else bf.append("NULL"); bf.append(" where "). append(table.primaryKey.protectedID). append('='). appendParameter(item.pk). appendTypeCheck(table, item.type); //System.out.println("storing "+bf.toString()); executeSQLUpdate(connection, bf, 1); } static interface ResultSetHandler { public void handle(ResultSet resultSet) throws SQLException; } private final static int convertSQLResult(final Object sqlInteger) { // IMPLEMENTATION NOTE // Whether the returned object is an Integer, a Long or a BigDecimal, // depends on the database used and for oracle on whether // OracleStatement.defineColumnType is used or not, so we support all // here. 
return ((Number)sqlInteger).intValue(); } //private static int timeExecuteQuery = 0; protected final StatementInfo executeSQLQuery( final Connection connection, final Statement statement, final ResultSetHandler resultSetHandler, final boolean makeStatementInfo, final boolean explain) { java.sql.Statement sqlStatement = null; ResultSet resultSet = null; try { final boolean log = !explain && (this.log || this.logStatementInfo || makeStatementInfo); final String sqlText = statement.getText(); final long logStart = log ? System.currentTimeMillis() : 0; final long logPrepared; final long logExecuted; if(!prepare) { sqlStatement = connection.createStatement(); dialect.defineColumnTypes(statement.columnTypes, sqlStatement); logPrepared = log ? System.currentTimeMillis() : 0; resultSet = sqlStatement.executeQuery(sqlText); logExecuted = log ? System.currentTimeMillis() : 0; resultSetHandler.handle(resultSet); } else { final PreparedStatement prepared = connection.prepareStatement(sqlText); sqlStatement = prepared; int parameterIndex = 1; for(Iterator i = statement.parameters.iterator(); i.hasNext(); parameterIndex++) setObject(sqlText, prepared, parameterIndex, i.next()); dialect.defineColumnTypes(statement.columnTypes, sqlStatement); logPrepared = log ? System.currentTimeMillis() : 0; resultSet = prepared.executeQuery(); logExecuted = log ? System.currentTimeMillis() : 0; resultSetHandler.handle(resultSet); } final long logResultRead = log ? System.currentTimeMillis() : 0; if(resultSet!=null) { resultSet.close(); resultSet = null; } if(sqlStatement!=null) { sqlStatement.close(); sqlStatement = null; } final long logEnd = log ? System.currentTimeMillis() : 0; if(!explain && this.log) log(logStart, logEnd, statement); final StatementInfo statementInfo = (!explain && (this.logStatementInfo || makeStatementInfo)) ? 
makeStatementInfo(statement, connection, logStart, logPrepared, logExecuted, logResultRead, logEnd) : null; if(!explain && this.logStatementInfo) statementInfo.print(System.out); return makeStatementInfo ? statementInfo : null; } catch(SQLException e) { throw new SQLRuntimeException(e, statement.toString()); } finally { if(resultSet!=null) { try { resultSet.close(); } catch(SQLException e) { // exception is already thrown } } if(sqlStatement!=null) { try { sqlStatement.close(); } catch(SQLException e) { // exception is already thrown } } } } private final void executeSQLUpdate(final Connection connection, final Statement statement, final int expectedRows) throws UniqueViolationException { executeSQLUpdate(connection, statement, expectedRows, null); } private final void executeSQLUpdate( final Connection connection, final Statement statement, final int expectedRows, final List<UniqueConstraint> threatenedUniqueConstraints) throws UniqueViolationException { java.sql.Statement sqlStatement = null; Savepoint savepoint = null; try { final String sqlText = statement.getText(); final long logStart = log ? System.currentTimeMillis() : 0; final int rows; if(threatenedUniqueConstraints!=null && threatenedUniqueConstraints.size()>0 && needsSavepoint) savepoint = connection.setSavepoint(); if(!prepare) { sqlStatement = connection.createStatement(); rows = sqlStatement.executeUpdate(sqlText); } else { final PreparedStatement prepared = connection.prepareStatement(sqlText); sqlStatement = prepared; int parameterIndex = 1; for(Iterator i = statement.parameters.iterator(); i.hasNext(); parameterIndex++) setObject(sqlText, prepared, parameterIndex, i.next()); rows = prepared.executeUpdate(); } final long logEnd = log ? 
System.currentTimeMillis() : 0; if(log) log(logStart, logEnd, statement); //System.out.println("("+rows+"): "+statement.getText()); if(rows!=expectedRows) throw new RuntimeException("expected "+expectedRows+" rows, but got "+rows+" on statement "+sqlText); } catch(SQLException e) { final UniqueViolationException wrappedException = wrapException(e, threatenedUniqueConstraints); if(wrappedException!=null) { if(savepoint!=null) { try { connection.rollback(savepoint); savepoint = null; } catch(SQLException ex) { throw new SQLRuntimeException(e, ex.getMessage() + " on rollback of: " + statement.toString()); } } throw wrappedException; } else throw new SQLRuntimeException(e, statement.toString()); } finally { if(sqlStatement!=null) { try { sqlStatement.close(); } catch(SQLException e) { // exception is already thrown } } } } private static final void setObject(String s, final PreparedStatement statement, final int parameterIndex, final Object value) throws SQLException { //try{ statement.setObject(parameterIndex, value); //}catch(SQLException e){ throw new SQLRuntimeException(e, "setObject("+parameterIndex+","+value+")"+s); } } final StatementInfo makeStatementInfo( final Statement statement, final Connection connection, final long start, final long prepared, final long executed, final long resultRead, final long end) { final StatementInfo result = new StatementInfo(statement.getText()); result.addChild(new StatementInfo("timing "+(end-start)+'/'+(prepared-start)+'/'+(executed-prepared)+'/'+(resultRead-executed)+'/'+(end-resultRead)+" (total/prepare/execute/readResult/close in ms)")); final ArrayList<Object> parameters = statement.parameters; if(parameters!=null) { final StatementInfo parametersChild = new StatementInfo("parameters"); result.addChild(parametersChild); int i = 1; for(Object p : parameters) parametersChild.addChild(new StatementInfo(String.valueOf(i++) + ':' + p)); } final StatementInfo planInfo = dialect.explainExecutionPlan(statement, connection, this); 
if(planInfo!=null) result.addChild(planInfo); return result; } private final UniqueViolationException wrapException( final SQLException e, final List<UniqueConstraint> threatenedUniqueConstraints) { final String uniqueConstraintID = dialect.extractUniqueConstraintName(e); if(uniqueConstraintID!=null) { final UniqueConstraint constraint; if(Dialect.ANY_CONSTRAINT.equals(uniqueConstraintID)) constraint = (threatenedUniqueConstraints.size()==1) ? threatenedUniqueConstraints.get(0) : null; else { constraint = uniqueConstraintsByID.get(uniqueConstraintID); if(constraint==null) throw new SQLRuntimeException(e, "no unique constraint found for >"+uniqueConstraintID +"<, has only "+uniqueConstraintsByID.keySet()); } return new UniqueViolationException(constraint, null, e); } return null; } /** * Trims a name to length for being a suitable qualifier for database entities, * such as tables, columns, indexes, constraints, partitions etc. */ protected static final String trimString(final String longString, final int maxLength) { if(maxLength<=0) throw new IllegalArgumentException("maxLength must be greater zero"); if(longString.length()==0) throw new IllegalArgumentException("longString must not be empty"); if(longString.length()<=maxLength) return (longString.indexOf('.')<=0) ? 
longString : longString.replace('.', '_'); int longStringLength = longString.length(); final int[] trimPotential = new int[maxLength]; final ArrayList<String> words = new ArrayList<String>(); { final StringBuffer buf = new StringBuffer(); for(int i=0; i<longString.length(); i++) { char c = longString.charAt(i); if(c=='.') c = '_'; if((c=='_' || Character.isUpperCase(c) || Character.isDigit(c)) && buf.length()>0) { words.add(buf.toString()); int potential = 1; for(int j = buf.length()-1; j>=0; j--, potential++) trimPotential[j] += potential; buf.setLength(0); } if(buf.length()<maxLength) buf.append(c); else longStringLength } if(buf.length()>0) { words.add(buf.toString()); int potential = 1; for(int j = buf.length()-1; j>=0; j--, potential++) trimPotential[j] += potential; buf.setLength(0); } } final int expectedTrimPotential = longStringLength - maxLength; //System.out.println("expected trim potential = "+expectedTrimPotential); int wordLength; int remainder = 0; for(wordLength = trimPotential.length-1; wordLength>=0; wordLength { //System.out.println("trim potential ["+wordLength+"] = "+trimPotential[wordLength]); remainder = trimPotential[wordLength] - expectedTrimPotential; if(remainder>=0) break; } final StringBuffer result = new StringBuffer(longStringLength); for(final String word : words) { //System.out.println("word "+word+" remainder:"+remainder); if((word.length()>wordLength) && remainder>0) { result.append(word.substring(0, wordLength+1)); remainder } else if(word.length()>wordLength) result.append(word.substring(0, wordLength)); else result.append(word); } if(result.length()!=maxLength) throw new RuntimeException(result.toString()+maxLength); return result.toString(); } final String makeName(final String longName) { return makeName(null, longName); } final String makeName(final String prefix, final String longName) { final String query = prefix==null ? 
longName : prefix+'.'+longName; final String forcedName = forcedNames.getProperty(query); if(forcedName!=null) return forcedName; return trimString(longName, 25); } /** * Search full text. */ final void appendMatchClause(final Statement bf, final StringFunction function, final String value) { if(fulltextIndex) dialect.appendMatchClauseFullTextIndex(bf, function, value); else dialect.appendMatchClauseByLike(bf, function, value); } private final int countTable(final Connection connection, final Table table) { final Statement bf = createStatement(); bf.append("select count(*) from ").defineColumnInteger(). append(table.protectedID); final CountResultSetHandler handler = new CountResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } private static class CountResultSetHandler implements ResultSetHandler { int result; public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new SQLException(NO_SUCH_ROW); result = convertSQLResult(resultSet.getObject(1)); } } final PkSource makePkSource(final Table table) { return butterflyPkSource ? (PkSource)new ButterflyPkSource(table) : new SequentialPkSource(table); } final int[] getMinMaxPK(final Connection connection, final Table table) { buildStage = false; final Statement bf = createStatement(); final String primaryKeyProtectedID = table.primaryKey.protectedID; bf.append("select min("). append(primaryKeyProtectedID).defineColumnInteger(). append("),max("). append(primaryKeyProtectedID).defineColumnInteger(). append(") from "). 
append(table.protectedID); final NextPKResultSetHandler handler = new NextPKResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } private static class NextPKResultSetHandler implements ResultSetHandler { int[] result; public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new SQLException(NO_SUCH_ROW); final Object oLo = resultSet.getObject(1); if(oLo!=null) { result = new int[2]; result[0] = convertSQLResult(oLo); final Object oHi = resultSet.getObject(2); result[1] = convertSQLResult(oHi); } } } final int checkTypeColumn(final Connection connection, final Type type) { buildStage = false; final Table table = type.getTable(); final Table superTable = type.getSupertype().getTable(); final Statement bf = createStatement(true); bf.append("select count(*) from "). append(table).append(',').append(superTable). append(" where "). append(table.primaryKey).append('=').append(superTable.primaryKey). append(" and "); if(table.typeColumn!=null) bf.append(table.typeColumn); else bf.appendParameter(type.id); bf.append("<>").append(superTable.typeColumn); //System.out.println("CHECKT:"+bf.toString()); final CheckTypeColumnResultSetHandler handler = new CheckTypeColumnResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } final int checkTypeColumn(final Connection connection, final ItemField field) { buildStage = false; final Table table = field.getType().getTable(); final Table valueTable = field.getValueType().getTable(); final String alias1 = driver.protectName(Table.SQL_ALIAS_1); final String alias2 = driver.protectName(Table.SQL_ALIAS_2); final Statement bf = createStatement(false); bf.append("select count(*) from "). append(table).append(' ').append(alias1). append(','). append(valueTable).append(' ').append(alias2). append(" where "). append(alias1).append('.').append(field.getColumn()). append('='). 
append(alias2).append('.').append(valueTable.primaryKey). append(" and "). append(alias1).append('.').append(field.getTypeColumn()). append("<>"). append(alias2).append('.').append(valueTable.typeColumn); //System.out.println("CHECKA:"+bf.toString()); final CheckTypeColumnResultSetHandler handler = new CheckTypeColumnResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } private static class CheckTypeColumnResultSetHandler implements ResultSetHandler { int result = Integer.MIN_VALUE; public void handle(final ResultSet resultSet) throws SQLException { if(!resultSet.next()) throw new RuntimeException(); result = resultSet.getInt(1); } } private static final String MIGRATION_COLUMN_VERSION_NAME = "v"; private static final String MIGRATION_COLUMN_COMMENT_NAME = "c"; final Schema makeSchema() { final Schema result = new Schema(driver, connectionPool); for(final Table t : tables) t.makeSchema(result); if(migrationSupported) { final com.exedio.dsmf.Table table = new com.exedio.dsmf.Table(result, Table.MIGRATION_TABLE_NAME); new com.exedio.dsmf.Column(table, MIGRATION_COLUMN_VERSION_NAME, dialect.getIntegerType(0, Integer.MAX_VALUE)); new com.exedio.dsmf.Column(table, MIGRATION_COLUMN_COMMENT_NAME, dialect.getStringType(100)); new com.exedio.dsmf.UniqueConstraint(table, Table.MIGRATION_UNIQUE_CONSTRAINT_NAME, '(' + driver.protectName(MIGRATION_COLUMN_VERSION_NAME) + ')'); } dialect.completeSchema(result); return result; } final Schema makeVerifiedSchema() { final Schema result = makeSchema(); result.verify(); return result; } final int getActualMigrationVersion(final Connection connection) { buildStage = false; final Statement bf = createStatement(); bf.append("select max("). append(driver.protectName(MIGRATION_COLUMN_VERSION_NAME)).defineColumnInteger(). append(") from "). 
append(driver.protectName(Table.MIGRATION_TABLE_NAME)); final ActualMigrationVersionResultSetHandler handler = new ActualMigrationVersionResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return handler.result; } private static class ActualMigrationVersionResultSetHandler implements ResultSetHandler { int result = -1; public void handle(final ResultSet resultSet) throws SQLException { resultSet.next(); result = resultSet.getInt(1); } } final Map<Integer, String> getMigrationLogs() { final ConnectionPool connectionPool = this.connectionPool; Connection con = null; try { con = connectionPool.getConnection(true); return getMigrationLogs(con); } catch(SQLException e) { throw new SQLRuntimeException(e, "getMigrationLogs"); } finally { if(con!=null) { try { connectionPool.putConnection(con); con = null; } catch(SQLException ex) { throw new SQLRuntimeException(ex, "close"); } } } } private final Map<Integer, String> getMigrationLogs(final Connection connection) { buildStage = false; final Statement bf = createStatement(); bf.append("select "). append(driver.protectName(MIGRATION_COLUMN_VERSION_NAME)).defineColumnInteger(). append(','). append(driver.protectName(MIGRATION_COLUMN_COMMENT_NAME)).defineColumnString(). append(" from "). 
append(driver.protectName(Table.MIGRATION_TABLE_NAME)); final MigrationLogsResultSetHandler handler = new MigrationLogsResultSetHandler(); executeSQLQuery(connection, bf, handler, false, false); return Collections.unmodifiableMap(handler.result); } private static class MigrationLogsResultSetHandler implements ResultSetHandler { final HashMap<Integer, String> result = new HashMap<Integer, String>(); public void handle(final ResultSet resultSet) throws SQLException { while(resultSet.next()) { final int version = resultSet.getInt(1); final String comment = resultSet.getString(2); final String previous = result.put(version, comment); if(previous!=null) throw new RuntimeException("duplicate version " + version + ':' + previous + "/" + comment); } } } private final void notifyMigration(final Connection connection, final int version, final Date date, final String comment, final boolean logToConsole) { assert migrationSupported; final String fullComment = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS").format(date) + ':' + comment; if(logToConsole) System.out.println("Migrated to version " + version + ':' + fullComment); final Statement bf = createStatement(); bf.append("insert into "). append(driver.protectName(Table.MIGRATION_TABLE_NAME)). append('('). append(driver.protectName(MIGRATION_COLUMN_VERSION_NAME)). append(','). append(driver.protectName(MIGRATION_COLUMN_COMMENT_NAME)). append(")values("). appendParameter(version). append(','). appendParameter(fullComment). 
append(')'); executeSQLUpdate(connection, bf, 1); } final void migrate(final int expectedVersion, final Migration[] migrations) { assert expectedVersion>=0 : expectedVersion; assert migrationSupported; final ConnectionPool connectionPool = this.connectionPool; Connection con = null; java.sql.Statement stmt = null; try { con = connectionPool.getConnection(true); final int actualVersion = getActualMigrationVersion(con); if(actualVersion>expectedVersion) { throw new IllegalArgumentException("cannot migrate backwards, expected " + expectedVersion + ", but was " + actualVersion); } else if(actualVersion<expectedVersion) { final Migration[] relevant = new Migration[expectedVersion-actualVersion]; for(final Migration migration : migrations) { final int version = migration.version; if(version<=actualVersion || version>expectedVersion) continue; // irrelevant final int relevantIndex = version - actualVersion - 1; assert relevant[relevantIndex]==null : "there is more than one migration for version " + version + ": " + relevant[relevantIndex].comment + " and " + migration.comment; relevant[relevantIndex] = migration; } IntArrayList missing = null; for(int i = 0; i<relevant.length; i++) { if(relevant[i]==null) { if(missing==null) missing = new IntArrayList(); missing.add(i + actualVersion + 1); } } if(missing!=null) throw new IllegalArgumentException( "no migration for versions " + missing.toString() + " on migration from " + actualVersion + " to " + expectedVersion); final Date date = new Date(); stmt = con.createStatement(); for(final Migration migration : relevant) { final String[] body = migration.body; final IntArrayList rowCounts = new IntArrayList(body.length); for(final String sql : body) { try { rowCounts.add(stmt.executeUpdate(sql)); } catch(SQLException e) { throw new SQLRuntimeException(e, sql); } } notifyMigration(con, migration.version, date, migration.comment + ' ' + rowCounts, true); } stmt.close(); stmt = null; } } catch(SQLException e) { throw new 
SQLRuntimeException(e, "migrate"); } finally { if(stmt!=null) { try { stmt.close(); stmt = null; } catch(SQLException ex) { throw new SQLRuntimeException(ex, "close"); } } if(con!=null) { try { connectionPool.putConnection(con); con = null; } catch(SQLException ex) { throw new SQLRuntimeException(ex, "close"); } } } } /** * @deprecated for debugging only, should never be used in committed code */ @Deprecated protected static final void printMeta(final ResultSet resultSet) throws SQLException { final ResultSetMetaData metaData = resultSet.getMetaData();; final int columnCount = metaData.getColumnCount(); for(int i = 1; i<=columnCount; i++) System.out.println(" } /** * @deprecated for debugging only, should never be used in committed code */ @Deprecated protected static final void printRow(final ResultSet resultSet) throws SQLException { final ResultSetMetaData metaData = resultSet.getMetaData();; final int columnCount = metaData.getColumnCount(); for(int i = 1; i<=columnCount; i++) System.out.println(" } /** * @deprecated for debugging only, should never be used in committed code */ @Deprecated static final ResultSetHandler logHandler = new ResultSetHandler() { public void handle(final ResultSet resultSet) throws SQLException { final int columnCount = resultSet.getMetaData().getColumnCount(); System.out.println("columnCount:"+columnCount); final ResultSetMetaData meta = resultSet.getMetaData(); for(int i = 1; i<=columnCount; i++) { System.out.println(meta.getColumnName(i)+"|"); } while(resultSet.next()) { for(int i = 1; i<=columnCount; i++) { System.out.println(resultSet.getObject(i)+"|"); } } } }; final void close() { getConnectionPool().flush(); } private static final DatabaseListener noopListener = new DatabaseListener() { public void load(Connection connection, PersistentState state) {/* DOES NOTHING */} public void search(Connection connection, Query query, boolean doCountOnly) {/* DOES NOTHING */} }; private DatabaseListener listener = noopListener; private 
final Object listenerLock = new Object(); final DatabaseListener setListener(DatabaseListener listener) { if(listener==null) listener = noopListener; DatabaseListener result; synchronized(listenerLock) { result = this.listener; this.listener = listener; } if(result==noopListener) result = null; return result; } }
package edu.hm.hafner.util;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

import com.google.errorprone.annotations.MustBeClosed;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * Base class for tests that need to read resource files from disk. Provides several useful methods that simplify
 * reading of resources from disk.
 *
 * @author Ullrich Hafner
 */
@SuppressWarnings("PMD.AbstractClassWithoutAbstractMethod")
public abstract class ResourceTest {
    /**
     * Reads the contents of the desired resource. The rules for searching resources associated with this test class are
     * implemented by the defining {@linkplain ClassLoader class loader} of this test class. This method delegates to
     * this object's class loader. If this object was loaded by the bootstrap class loader, the method delegates to
     * {@link ClassLoader#getSystemResource}.
     * <p>
     * Before delegation, an absolute resource name is constructed from the given resource name using this algorithm:
     * </p>
     * <ul>
     * <li> If the {@code name} begins with a {@code '/'} (<tt>'&#92;u002f'</tt>), then the absolute name of the
     *      resource is the portion of the {@code name} following the {@code '/'}.</li>
     * <li> Otherwise, the absolute name is of the following form:
     *      <blockquote> {@code modified_package_name/name} </blockquote>
     *      <p> Where the {@code modified_package_name} is the package name of this object with {@code '/'}
     *      substituted for {@code '.'} (<tt>'&#92;u002e'</tt>).</li>
     * </ul>
     *
     * @param fileName
     *         name of the desired resource
     *
     * @return the content represented by a byte array
     */
    protected byte[] readAllBytes(final String fileName) {
        try {
            return Files.readAllBytes(getPath(fileName));
        }
        catch (IOException | URISyntaxException e) {
            throw new AssertionError("Can't read resource " + fileName, e);
        }
    }

    @SuppressFBWarnings("UI_INHERITANCE_UNSAFE_GETRESOURCE")
    private Path getPath(final String name) throws URISyntaxException {
        URL resource = getClass().getResource(name);
        if (resource == null) {
            throw new AssertionError("Can't find resource " + name);
        }
        return Paths.get(resource.toURI());
    }

    /**
     * Read all lines from the desired resource as a {@code Stream}, i.e. this method populates lazily as the stream is
     * consumed.
     * <p>
     * Bytes from the resource are decoded into characters using UTF-8 and the same line terminators as specified by
     * {@link Files#readAllLines(Path, Charset)} are supported.
     * </p>
     *
     * @param fileName
     *         name of the desired resource
     *
     * @return the content represented as a {@link Stream} of lines
     */
    @MustBeClosed
    protected Stream<String> asStream(final String fileName) {
        return asStream(fileName, StandardCharsets.UTF_8);
    }

    /**
     * Read all lines from the desired resource as a {@code Stream}, i.e. this method populates lazily as the stream is
     * consumed.
     * <p>
     * Bytes from the resource are decoded into characters using the specified charset and the same line terminators as
     * specified by {@link Files#readAllLines(Path, Charset)} are supported.
     * </p>
     *
     * @param fileName
     *         name of the desired resource
     * @param charset
     *         the charset to use for decoding
     *
     * @return the content represented as a {@link Stream} of lines
     */
    @MustBeClosed
    protected Stream<String> asStream(final String fileName, final Charset charset) {
        try {
            return Files.lines(getPath(fileName), charset);
        }
        catch (IOException | URISyntaxException e) {
            throw new AssertionError("Can't read resource " + fileName, e);
        }
    }

    /**
     * Finds a resource with the given name and returns an input stream with UTF-8 decoding.
     *
     * @param fileName
     *         name of the desired resource
     *
     * @return the content represented as an {@link InputStream}
     */
    protected InputStream asInputStream(final String fileName) {
        InputStream stream = getTestResourceClass().getResourceAsStream(fileName);
        if (stream == null) {
            throw new AssertionError("Can't find resource " + fileName);
        }
        return stream;
    }

    /**
     * Returns the class that should be used to read the resource files of a test.
     *
     * @return default value is the actual test class
     */
    protected Class<?> getTestResourceClass() {
        return getClass();
    }

    /**
     * Finds a resource with the given name and returns the content (decoded with UTF-8) as String.
     *
     * @param fileName
     *         name of the desired resource
     *
     * @return the content represented as {@link String}
     */
    protected String toString(final String fileName) {
        return new String(readAllBytes(fileName), StandardCharsets.UTF_8);
    }

    /**
     * Returns the content of the specified {@link File} (decoded with UTF-8) as String.
     *
     * @param file
     *         the desired file
     *
     * @return the content represented as {@link String}
     */
    protected String toString(final File file) {
        try {
            // Decode explicitly with UTF-8: the previous implementation used the platform default
            // charset, contradicting this method's contract and the rest of this class.
            return new String(Files.readAllBytes(file.toPath()), StandardCharsets.UTF_8);
        }
        catch (IOException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Read all lines from the specified text String as a {@code Stream}.
     *
     * @param text
     *         the text to return as {@link Stream} of lines
     *
     * @return the content represented as a {@link Stream} of lines
     */
    @SuppressWarnings({"resource", "IOResourceOpenedButNotSafelyClosed"})
    protected Stream<String> getTextLinesAsStream(final String text) {
        return new BufferedReader(new StringReader(text)).lines();
    }
}
package com.sequenceiq.cloudbreak.converter;

import static com.sequenceiq.cloudbreak.common.type.CloudConstants.AWS;
import static com.sequenceiq.cloudbreak.common.type.CloudConstants.AZURE;
import static com.sequenceiq.cloudbreak.common.type.CloudConstants.GCP;

import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import javax.inject.Inject;

import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;

import com.cloudera.thunderhead.service.usermanagement.UserManagementProto;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.sequenceiq.cloudbreak.api.endpoint.v4.common.StackType;
import com.sequenceiq.cloudbreak.api.endpoint.v4.dto.NameOrCrn;
import com.sequenceiq.cloudbreak.api.service.ExposedServiceCollector;
import com.sequenceiq.cloudbreak.auth.altus.EntitlementService;
import com.sequenceiq.cloudbreak.auth.altus.GrpcUmsClient;
import com.sequenceiq.cloudbreak.auth.altus.UmsRight;
import com.sequenceiq.cloudbreak.auth.altus.VirtualGroupRequest;
import com.sequenceiq.cloudbreak.auth.altus.VirtualGroupService;
import com.sequenceiq.cloudbreak.auth.crn.Crn;
import com.sequenceiq.cloudbreak.cloud.model.CloudCredential;
import com.sequenceiq.cloudbreak.cloud.model.ClouderaManagerProduct;
import com.sequenceiq.cloudbreak.cloud.model.ClouderaManagerRepo;
import com.sequenceiq.cloudbreak.cloud.model.StackInputs;
import com.sequenceiq.cloudbreak.cloud.model.StackTags;
import com.sequenceiq.cloudbreak.cluster.service.ClusterComponentConfigProvider;
import com.sequenceiq.cloudbreak.cmtemplate.cloudstorage.CmCloudStorageConfigProvider;
import com.sequenceiq.cloudbreak.cmtemplate.configproviders.ranger.RangerCloudStorageServiceConfigProvider;
import com.sequenceiq.cloudbreak.cmtemplate.general.GeneralClusterConfigsProvider;
import com.sequenceiq.cloudbreak.common.exception.CloudbreakServiceException;
import com.sequenceiq.cloudbreak.common.mappable.CloudPlatform;
import com.sequenceiq.cloudbreak.common.service.TransactionService;
import com.sequenceiq.cloudbreak.converter.spi.CredentialToCloudCredentialConverter;
import com.sequenceiq.cloudbreak.core.bootstrap.service.container.postgres.PostgresConfigService;
import com.sequenceiq.cloudbreak.domain.FileSystem;
import com.sequenceiq.cloudbreak.domain.StorageLocation;
import com.sequenceiq.cloudbreak.domain.cloudstorage.AccountMapping;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.domain.stack.cluster.Cluster;
import com.sequenceiq.cloudbreak.domain.stack.cluster.IdBroker;
import com.sequenceiq.cloudbreak.domain.stack.cluster.gateway.Gateway;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceGroup;
import com.sequenceiq.cloudbreak.dto.LdapView;
import com.sequenceiq.cloudbreak.dto.credential.Credential;
import com.sequenceiq.cloudbreak.exception.CustomConfigurationsRuntimeVersionException;
import com.sequenceiq.cloudbreak.kerberos.KerberosConfigService;
import com.sequenceiq.cloudbreak.ldap.LdapConfigService;
import com.sequenceiq.cloudbreak.logger.MDCUtils;
import com.sequenceiq.cloudbreak.service.GatewayConfigService;
import com.sequenceiq.cloudbreak.service.LoadBalancerConfigService;
import com.sequenceiq.cloudbreak.service.ServiceEndpointCollector;
import com.sequenceiq.cloudbreak.service.blueprint.BlueprintViewProvider;
import com.sequenceiq.cloudbreak.service.cluster.ClusterService;
import com.sequenceiq.cloudbreak.service.customconfigs.CustomConfigurationsService;
import com.sequenceiq.cloudbreak.service.customconfigs.CustomConfigurationsViewProvider;
import com.sequenceiq.cloudbreak.service.datalake.SdxClientService;
import com.sequenceiq.cloudbreak.service.environment.EnvironmentClientService;
import com.sequenceiq.cloudbreak.service.environment.credential.CredentialConverter;
import com.sequenceiq.cloudbreak.service.hostgroup.HostGroupService;
import com.sequenceiq.cloudbreak.service.idbroker.IdBrokerService;
import com.sequenceiq.cloudbreak.service.identitymapping.AwsMockAccountMappingService;
import com.sequenceiq.cloudbreak.service.identitymapping.AzureMockAccountMappingService;
import com.sequenceiq.cloudbreak.service.identitymapping.GcpMockAccountMappingService;
import com.sequenceiq.cloudbreak.service.rdsconfig.RedbeamsDbCertificateProvider;
import com.sequenceiq.cloudbreak.service.resource.ResourceService;
import com.sequenceiq.cloudbreak.service.sharedservice.DatalakeService;
import com.sequenceiq.cloudbreak.tag.AccountTagValidationFailed;
import com.sequenceiq.cloudbreak.template.BlueprintProcessingException;
import com.sequenceiq.cloudbreak.template.TemplatePreparationObject;
import com.sequenceiq.cloudbreak.template.TemplatePreparationObject.Builder;
import com.sequenceiq.cloudbreak.template.filesystem.BaseFileSystemConfigurationsView;
import com.sequenceiq.cloudbreak.template.filesystem.FileSystemConfigurationProvider;
import com.sequenceiq.cloudbreak.template.filesystem.StorageLocationView;
import com.sequenceiq.cloudbreak.template.model.GeneralClusterConfigs;
import com.sequenceiq.cloudbreak.template.views.AccountMappingView;
import com.sequenceiq.cloudbreak.template.views.ClusterExposedServiceView;
import com.sequenceiq.cloudbreak.template.views.CustomConfigurationsView;
import com.sequenceiq.cloudbreak.template.views.DatalakeView;
import com.sequenceiq.cloudbreak.template.views.PlacementView;
import com.sequenceiq.cloudbreak.util.StackUtil;
import com.sequenceiq.common.api.backup.response.BackupResponse;
import com.sequenceiq.common.api.telemetry.response.TelemetryResponse;
import com.sequenceiq.common.api.type.ResourceType;
import com.sequenceiq.environment.api.v1.environment.model.base.IdBrokerMappingSource;
import com.sequenceiq.environment.api.v1.environment.model.response.DetailedEnvironmentResponse;
import com.sequenceiq.sdx.api.model.SdxClusterResponse;

/**
 * Converts a {@link Stack} domain entity into a {@link TemplatePreparationObject}, the aggregate
 * view that the cluster template processing pipeline consumes. Gathers environment, credential,
 * LDAP/Kerberos, file-system, IDBroker, tag and gateway information from the various injected
 * services and assembles them via {@link Builder}.
 */
@Component
public class StackToTemplatePreparationObjectConverter {

    @Inject
    private HostGroupService hostGroupService;

    @Inject
    private ClusterComponentConfigProvider clusterComponentConfigProvider;

    @Inject
    private PostgresConfigService postgresConfigService;

    @Inject
    private RedbeamsDbCertificateProvider dbCertificateProvider;

    @Inject
    private FileSystemConfigurationProvider fileSystemConfigurationProvider;

    @Inject
    private ClusterService clusterService;

    @Inject
    private GeneralClusterConfigsProvider generalClusterConfigsProvider;

    @Inject
    private CustomConfigurationsViewProvider customConfigurationsViewProvider;

    @Inject
    private BlueprintViewProvider blueprintViewProvider;

    @Inject
    private CredentialConverter credentialConverter;

    @Inject
    private LdapConfigService ldapConfigService;

    @Inject
    private KerberosConfigService kerberosConfigService;

    @Inject
    private EnvironmentClientService environmentClientService;

    @Inject
    private AwsMockAccountMappingService awsMockAccountMappingService;

    @Inject
    private AzureMockAccountMappingService azureMockAccountMappingService;

    @Inject
    private GcpMockAccountMappingService gcpMockAccountMappingService;

    @Inject
    private CustomConfigurationsService customConfigurationsService;

    @Inject
    private CmCloudStorageConfigProvider cmCloudStorageConfigProvider;

    @Inject
    private ServiceEndpointCollector serviceEndpointCollector;

    @Inject
    private StackUtil stackUtil;

    @Inject
    private VirtualGroupService virtualGroupService;

    @Inject
    private EntitlementService entitlementService;

    @Inject
    private ExposedServiceCollector exposedServiceCollector;

    @Inject
    private ResourceService resourceService;

    @Inject
    private GatewayConfigService gatewayConfigService;

    @Inject
    private SdxClientService sdxClientService;

    @Inject
    private IdBrokerService idBrokerService;

    @Inject
    private IdBrokerConverterUtil idBrokerConverterUtil;

    @Inject
    private GrpcUmsClient grpcUmsClient;

    @Inject
    private LoadBalancerConfigService loadBalancerConfigService;

    @Inject
    private TransactionService transactionService;

    @Inject
    private CredentialToCloudCredentialConverter credentialToCloudCredentialConverter;

    @Inject
    private DatalakeService datalakeService;

    /**
     * Builds the complete {@link TemplatePreparationObject} for the given stack.
     * All checked failures are rethrown as {@link CloudbreakServiceException}.
     *
     * @param source the stack to convert; must have an associated cluster
     * @return the assembled template preparation object
     * @throws CloudbreakServiceException on tag validation, blueprint, I/O or transaction failures
     */
    public TemplatePreparationObject convert(Stack source) {
        try {
            Map<String, Collection<ClusterExposedServiceView>> views = serviceEndpointCollector
                    .prepareClusterExposedServicesViews(source.getCluster(), stackUtil.extractClusterManagerAddress(source));
            DetailedEnvironmentResponse environment = environmentClientService.getByCrn(source.getEnvironmentCrn());
            Credential credential = credentialConverter.convert(environment.getCredential());
            Cluster cluster = clusterService.getById(source.getCluster().getId());
            FileSystem fileSystem = cluster.getFileSystem();
            Optional<LdapView> ldapView = ldapConfigService.get(source.getEnvironmentCrn(), source.getName());
            ClouderaManagerRepo cm = clusterComponentConfigProvider.getClouderaManagerRepoDetails(cluster.getId());
            List<ClouderaManagerProduct> products = clusterComponentConfigProvider.getClouderaManagerProductDetails(cluster.getId());
            BaseFileSystemConfigurationsView fileSystemConfigurationView = getFileSystemConfigurationView(credential, source, fileSystem);
            updateFileSystemViewWithBackupLocation(environment, fileSystemConfigurationView);
            StackInputs stackInputs = getStackInputs(source);
            // Fix inputs are merged with the datalake inputs before being handed to the builder.
            Map<String, Object> fixInputs = stackInputs.getFixInputs() == null ? new HashMap<>() : stackInputs.getFixInputs();
            fixInputs.putAll(stackInputs.getDatalakeInputs() == null ? new HashMap<>() : stackInputs.getDatalakeInputs());
            Gateway gateway = cluster.getGateway();
            String gatewaySignKey = null;
            if (gateway != null) {
                gatewaySignKey = gateway.getSignKey();
            }
            // Lazily create and persist IDBroker sign keys for clusters that do not have them yet.
            IdBroker idbroker = idBrokerService.getByCluster(cluster);
            if (idbroker == null) {
                idbroker = idBrokerConverterUtil.generateIdBrokerSignKeys(cluster);
                idBrokerService.save(idbroker);
            }
            String envCrnForVirtualGroups = getEnvironmentCrnForVirtualGroups(environment);
            VirtualGroupRequest virtualGroupRequest = new VirtualGroupRequest(envCrnForVirtualGroups,
                    ldapView.map(LdapView::getAdminGroup).orElse(""));
            String accountId = Crn.safeFromString(source.getResourceCrn()).getAccountId();
            List<UserManagementProto.ServicePrincipalCloudIdentities> servicePrincipalCloudIdentities =
                    grpcUmsClient.listServicePrincipalCloudIdentities(accountId, source.getEnvironmentCrn(), MDCUtils.getRequestId());
            Builder builder = Builder.builder()
                    .withCloudPlatform(CloudPlatform.valueOf(source.getCloudPlatform()))
                    .withRdsConfigs(postgresConfigService.createRdsConfigIfNeeded(source, cluster))
                    .withRdsSslCertificateFilePath(dbCertificateProvider.getSslCertsFilePath())
                    .withGateway(gateway, gatewaySignKey, exposedServiceCollector.getAllKnoxExposed())
                    .withIdBroker(idbroker)
                    .withCustomConfigurationsView(getCustomConfigurationsView(source, cluster))
                    .withCustomInputs(stackInputs.getCustomInputs() == null ? new HashMap<>() : stackInputs.getCustomInputs())
                    .withFixInputs(fixInputs)
                    .withBlueprintView(blueprintViewProvider.getBlueprintView(cluster.getBlueprint()))
                    .withFileSystemConfigurationView(fileSystemConfigurationView)
                    .withGeneralClusterConfigs(calculateGeneralClusterConfigs(source, cluster))
                    .withLdapConfig(ldapView.orElse(null))
                    .withKerberosConfig(kerberosConfigService.get(source.getEnvironmentCrn(), source.getName()).orElse(null))
                    .withProductDetails(cm, products)
                    .withExposedServices(views)
                    .withDefaultTags(getStackTags(source))
                    .withSharedServiceConfigs(datalakeService.createSharedServiceConfigsView(source))
                    .withStackType(source.getType())
                    .withVirtualGroupView(virtualGroupRequest);
            // Host groups are lazily loaded; fetch them inside a transaction.
            transactionService.required(() -> {
                builder.withHostgroups(hostGroupService.getByCluster(cluster.getId()));
            });
            decorateBuilderWithPlacement(source, builder);
            decorateBuilderWithAccountMapping(source, environment, credential, builder, virtualGroupRequest);
            decorateBuilderWithServicePrincipals(source, builder, servicePrincipalCloudIdentities);
            decorateDatalakeView(source, builder);
            return builder.build();
        } catch (AccountTagValidationFailed aTVF) {
            throw new CloudbreakServiceException(aTVF);
        } catch (BlueprintProcessingException | IOException | TransactionService.TransactionExecutionException e) {
            throw new CloudbreakServiceException(e.getMessage(), e);
        }
    }

    /**
     * Collects the merged tag map of the stack. Application tags are applied first, then
     * user-defined tags, then default tags (later entries overwrite earlier ones on key clash).
     */
    private Map<String, String> getStackTags(Stack source) throws IOException {
        Map<String, String> userDefinedTags = new HashMap<>();
        if (source.getTags() != null) {
            StackTags stackTags = source.getTags().get(StackTags.class);
            if (stackTags != null) {
                // Previously the tags were fetched a second time into a redundant local
                // ('stackTag'); reuse the already null-checked instance instead.
                Map<String, String> userDefined = stackTags.getUserDefinedTags();
                Map<String, String> defaultTags = stackTags.getDefaultTags();
                Map<String, String> applicationTags = stackTags.getApplicationTags();
                if (applicationTags != null) {
                    userDefinedTags.putAll(applicationTags);
                }
                if (userDefined != null) {
                    userDefinedTags.putAll(userDefined);
                }
                if (defaultTags != null) {
                    userDefinedTags.putAll(defaultTags);
                }
            }
        }
        return userDefinedTags;
    }

    /**
     * Returns the parent environment CRN when one is set, otherwise the environment's own CRN.
     * Virtual groups are always resolved against the top-most (parent) environment.
     */
    private String getEnvironmentCrnForVirtualGroups(DetailedEnvironmentResponse environment) {
        String envCrnForVirtualGroups = environment.getCrn();
        // isNotEmpty is the single-argument idiom; the previous varargs isNoneEmpty call was
        // equivalent for one argument but misleading.
        if (StringUtils.isNotEmpty(environment.getParentEnvironmentCrn())) {
            envCrnForVirtualGroups = environment.getParentEnvironmentCrn();
        }
        return envCrnForVirtualGroups;
    }

    /** Builds the cloud-storage configuration view, or returns {@code null} when the cluster has no file system. */
    private BaseFileSystemConfigurationsView getFileSystemConfigurationView(Credential credential, Stack source, FileSystem fileSystem)
            throws IOException {
        BaseFileSystemConfigurationsView fileSystemConfigurationView = null;
        if (source.getCluster().getFileSystem() != null) {
            fileSystemConfigurationView = fileSystemConfigurationProvider.fileSystemConfiguration(fileSystem, source,
                    (ResourceType r) -> resourceService.findByStackIdAndType(source.getId(), r),
                    credential.getAttributes(),
                    cmCloudStorageConfigProvider.getConfigQueryEntries());
        }
        return fileSystemConfigurationView;
    }

    /** Reads the stack inputs, substituting an all-empty instance when none are stored. */
    private StackInputs getStackInputs(Stack source) throws IOException {
        StackInputs stackInputs = source.getInputs().get(StackInputs.class);
        if (stackInputs == null) {
            stackInputs = new StackInputs(new HashMap<>(), new HashMap<>(), new HashMap<>());
        }
        return stackInputs;
    }

    /** Adds the stack's region/availability-zone placement to the builder. */
    private void decorateBuilderWithPlacement(Stack source, Builder builder) {
        String region = source.getRegion();
        String availabilityZone = source.getAvailabilityZone();
        builder.withPlacementView(new PlacementView(region, availabilityZone));
    }

    /**
     * Loads the custom configurations view for WORKLOAD stacks that have one attached.
     *
     * @throws CustomConfigurationsRuntimeVersionException when the configuration's runtime
     *         version does not match the stack's runtime version
     */
    private CustomConfigurationsView getCustomConfigurationsView(Stack source, Cluster cluster) {
        CustomConfigurationsView customConfigurationsView = null;
        if (StackType.WORKLOAD.equals(source.getType()) && source.getCluster().getCustomConfigurations() != null) {
            customConfigurationsView = customConfigurationsViewProvider.getCustomConfigurationsView(customConfigurationsService
                    .getByNameOrCrn(NameOrCrn.ofCrn(clusterService.findOneWithCustomConfigurations(cluster.getId())
                            .getCustomConfigurations().getCrn())));
            if (customConfigurationsView.getRuntimeVersion() != null
                    && !source.getStackVersion().equals(customConfigurationsView.getRuntimeVersion())) {
                throw new CustomConfigurationsRuntimeVersionException("Custom Configurations runtime version mismatch!");
            }
        }
        return customConfigurationsView;
    }

    /**
     * For DATALAKE stacks, adds the explicit account mapping when one is configured; otherwise,
     * for MOCK IDBroker mapping sources, generates per-cloud-platform mock group/user mappings.
     */
    private void decorateBuilderWithAccountMapping(Stack source, DetailedEnvironmentResponse environment, Credential credential,
            Builder builder, VirtualGroupRequest virtualGroupRequest) {
        if (source.getType() == StackType.DATALAKE) {
            AccountMapping accountMapping = isCloudStorageConfigured(source)
                    ? source.getCluster().getFileSystem().getCloudStorage().getAccountMapping()
                    : null;
            if (accountMapping != null) {
                builder.withAccountMappingView(new AccountMappingView(accountMapping.getGroupMappings(), accountMapping.getUserMappings()));
            } else if (environment.getIdBrokerMappingSource() == IdBrokerMappingSource.MOCK
                    && source.getCluster().getFileSystem() != null) {
                Map<String, String> groupMappings;
                Map<String, String> userMappings;
                CloudCredential cloudCredential = credentialToCloudCredentialConverter.convert(credential);
                String virtualGroup = getMockVirtualGroup(virtualGroupRequest);
                switch (source.getCloudPlatform()) {
                    case AWS:
                        groupMappings = awsMockAccountMappingService.getGroupMappings(source.getRegion(), cloudCredential, virtualGroup);
                        userMappings = awsMockAccountMappingService.getUserMappings(source.getRegion(), cloudCredential);
                        break;
                    case AZURE:
                        groupMappings = azureMockAccountMappingService.getGroupMappings(AzureMockAccountMappingService.MSI_RESOURCE_GROUP_NAME,
                                cloudCredential, virtualGroup);
                        userMappings = azureMockAccountMappingService.getUserMappings(AzureMockAccountMappingService.MSI_RESOURCE_GROUP_NAME,
                                cloudCredential);
                        break;
                    case GCP:
                        groupMappings = gcpMockAccountMappingService.getGroupMappings(source.getRegion(), cloudCredential, virtualGroup);
                        userMappings = gcpMockAccountMappingService.getUserMappings(source.getRegion(), cloudCredential);
                        break;
                    default:
                        // Unsupported platforms get no account mapping view at all.
                        return;
                }
                builder.withAccountMappingView(new AccountMappingView(groupMappings, userMappings));
            }
        }
    }

    /**
     * For WORKLOAD stacks with an environment, records whether the attached datalake has
     * Ranger RAZ enabled (first datalake of the environment is used).
     */
    private void decorateDatalakeView(Stack source, TemplatePreparationObject.Builder builder) {
        DatalakeView datalakeView = null;
        if (StringUtils.isNotEmpty(source.getEnvironmentCrn()) && StackType.WORKLOAD.equals(source.getType())) {
            List<SdxClusterResponse> datalakes = sdxClientService.getByEnvironmentCrn(source.getEnvironmentCrn());
            if (!datalakes.isEmpty()) {
                datalakeView = new DatalakeView(datalakes.get(0).getRangerRazEnabled());
            }
        }
        builder.withDataLakeView(datalakeView);
    }

    /** Resolves the mock virtual group for the Cloudera Manager admin right. */
    private String getMockVirtualGroup(VirtualGroupRequest virtualGroupRequest) {
        return virtualGroupService.getVirtualGroup(virtualGroupRequest, UmsRight.CLOUDER_MANAGER_ADMIN.getRight());
    }

    /** Returns whether the stack's cluster has both a file system and a cloud storage configuration. */
    private boolean isCloudStorageConfigured(Stack source) {
        return source.getCluster().getFileSystem() != null
                && source.getCluster().getFileSystem().getCloudStorage() != null;
    }

    /**
     * Computes the general cluster configs, filling in the multi-AZ flag, the cluster manager IP,
     * the primary gateway FQDN, the load-balancer FQDN and the account id where available.
     */
    private GeneralClusterConfigs calculateGeneralClusterConfigs(Stack source, Cluster cluster) {
        GeneralClusterConfigs generalClusterConfigs = generalClusterConfigsProvider.generalClusterConfigs(source, cluster);
        boolean allInstanceGroupsHaveMultiAz = source.getInstanceGroups().stream().allMatch(this::isInstanceGroupsHaveMultiAz);
        generalClusterConfigs.setMultiAzEnabled(allInstanceGroupsHaveMultiAz);
        if (source.getPrimaryGatewayInstance() != null) {
            if (StringUtils.isBlank(generalClusterConfigs.getClusterManagerIp())) {
                String primaryGatewayIp = gatewayConfigService.getPrimaryGatewayIp(source);
                generalClusterConfigs.setClusterManagerIp(primaryGatewayIp);
            }
            Optional<String> instanceDiscoveryFQDN = generalClusterConfigs.getPrimaryGatewayInstanceDiscoveryFQDN();
            if (instanceDiscoveryFQDN.isEmpty()) {
                generalClusterConfigs.setPrimaryGatewayInstanceDiscoveryFQDN(
                        Optional.of(source.getPrimaryGatewayInstance().getDiscoveryFQDN()));
            }
        }
        generalClusterConfigs.setLoadBalancerGatewayFqdn(
                Optional.ofNullable(loadBalancerConfigService.getLoadBalancerUserFacingFQDN(source.getId())));
        generalClusterConfigs.setAccountId(Optional.ofNullable(Crn.safeFromString(source.getResourceCrn()).getAccountId()));
        return generalClusterConfigs;
    }

    /** Returns whether the instance group spans more than one availability zone. */
    boolean isInstanceGroupsHaveMultiAz(InstanceGroup instanceGroup) {
        return instanceGroup.getAvailabilityZones().size() > 1;
    }

    /**
     * For RAZ-enabled Azure datalakes (with the cloud-identity-mapping entitlement), maps each
     * service principal to its Azure object id; otherwise clears the service principals.
     */
    private void decorateBuilderWithServicePrincipals(Stack source, Builder builder,
            List<UserManagementProto.ServicePrincipalCloudIdentities> servicePrincipalCloudIdentities) {
        if (StackType.DATALAKE.equals(source.getType())
                && AZURE.equals(source.cloudPlatform())
                && source.getCluster().isRangerRazEnabled()
                && entitlementService.cloudIdentityMappingEnabled(Crn.safeFromString(source.getResourceCrn()).getAccountId())) {
            ImmutableMap.Builder<String, String> azureObjectIdMap = ImmutableMap.builder();
            servicePrincipalCloudIdentities.forEach(spCloudId -> {
                Optional<String> azureObjectId = getOptionalAzureObjectId(spCloudId.getCloudIdentitiesList());
                if (azureObjectId.isPresent()) {
                    azureObjectIdMap.put(spCloudId.getServicePrincipal(), azureObjectId.get());
                }
            });
            builder.withServicePrincipals(azureObjectIdMap.build());
        } else {
            builder.withServicePrincipals(null);
        }
    }

    /**
     * Extracts the single Azure object id from the cloud identities, if present.
     *
     * @throws IllegalStateException when more than one Azure cloud identity is present
     */
    private Optional<String> getOptionalAzureObjectId(List<UserManagementProto.CloudIdentity> cloudIdentities) {
        List<UserManagementProto.CloudIdentity> azureCloudIdentities = cloudIdentities.stream()
                .filter(cloudIdentity -> cloudIdentity.getCloudIdentityName().hasAzureCloudIdentityName())
                .collect(Collectors.toList());
        if (azureCloudIdentities.isEmpty()) {
            return Optional.empty();
        } else if (azureCloudIdentities.size() > 1) {
            throw new IllegalStateException(String.format("List contains multiple azure cloud identities = %s", cloudIdentities));
        } else {
            String azureObjectId = Iterables.getOnlyElement(azureCloudIdentities).getCloudIdentityName()
                    .getAzureCloudIdentityName().getObjectId();
            return Optional.of(azureObjectId);
        }
    }

    /**
     * Adds the backup location (preferring the explicit backup storage location, falling back to
     * the telemetry logging location) to the file system view's storage locations.
     */
    private void updateFileSystemViewWithBackupLocation(DetailedEnvironmentResponse detailedEnvironmentResponse,
            BaseFileSystemConfigurationsView fileSystemConfigurationView) {
        if (fileSystemConfigurationView != null) {
            BackupResponse backupResponse = detailedEnvironmentResponse.getBackup();
            TelemetryResponse telemetryResponse = detailedEnvironmentResponse.getTelemetry();
            Optional<String> backupLocation = Optional.empty();
            if (backupResponse != null && backupResponse.getStorageLocation() != null) {
                backupLocation = Optional.of(backupResponse.getStorageLocation());
            } else if (telemetryResponse != null && telemetryResponse.getLogging() != null) {
                backupLocation = Optional.of(telemetryResponse.getLogging().getStorageLocation());
            }
            if (backupLocation.isPresent()) {
                StorageLocation storageLocation = new StorageLocation();
                storageLocation.setValue(backupLocation.get());
                storageLocation.setProperty(RangerCloudStorageServiceConfigProvider.DEFAULT_BACKUP_DIR);
                StorageLocationView backupLocationView = new StorageLocationView(storageLocation);
                fileSystemConfigurationView.getLocations().add(backupLocationView);
            }
        }
    }
}
package nl.matsv.viabackwards.protocol.protocol1_12to1_11_1.packets;

import nl.matsv.viabackwards.ViaBackwards;
import nl.matsv.viabackwards.api.entities.storage.EntityData;
import nl.matsv.viabackwards.api.entities.storage.MetaStorage;
import nl.matsv.viabackwards.api.entities.types.AbstractEntityType;
import nl.matsv.viabackwards.api.entities.types.EntityType1_12;
import nl.matsv.viabackwards.api.exceptions.RemovedValueException;
import nl.matsv.viabackwards.api.rewriters.EntityRewriter;
import nl.matsv.viabackwards.protocol.protocol1_12to1_11_1.Protocol1_11_1To1_12;
import nl.matsv.viabackwards.protocol.protocol1_12to1_11_1.data.ParrotStorage;
import nl.matsv.viabackwards.utils.Block;
import us.myles.ViaVersion.api.PacketWrapper;
import us.myles.ViaVersion.api.Via;
import us.myles.ViaVersion.api.minecraft.metadata.Metadata;
import us.myles.ViaVersion.api.minecraft.metadata.types.MetaType1_12;
import us.myles.ViaVersion.api.remapper.PacketHandler;
import us.myles.ViaVersion.api.remapper.PacketRemapper;
import us.myles.ViaVersion.api.type.Type;
import us.myles.ViaVersion.api.type.types.version.Types1_12;
import us.myles.ViaVersion.packets.State;
import us.myles.ViaVersion.protocols.protocol1_9_3to1_9_1_2.storage.ClientWorld;

import java.util.Optional;

import static nl.matsv.viabackwards.api.entities.types.EntityType1_12.*;

/**
 * Rewrites clientbound (outgoing) entity packets between the 1.12 and 1.11.1 protocols:
 * tracks spawned entities, remaps removed/renamed entity types (Parrot, Illusioner) to
 * 1.11.1-compatible replacements, and filters metadata the older client cannot understand.
 */
public class EntityPackets1_12 extends EntityRewriter<Protocol1_11_1To1_12> {

    @Override
    protected void registerPackets(Protocol1_11_1To1_12 protocol) {
        // Spawn Object
        protocol.registerOutgoing(State.PLAY, 0x00, 0x00, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity id
                map(Type.UUID); // 1 - UUID
                map(Type.BYTE); // 2 - Type
                map(Type.DOUBLE); // 3 - x
                map(Type.DOUBLE); // 4 - y
                map(Type.DOUBLE); // 5 - z
                map(Type.BYTE); // 6 - Pitch
                map(Type.BYTE); // 7 - Yaw
                map(Type.INT); // 8 - data

                // Track Entity
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                getTypeFromId(wrapper.get(Type.BYTE, 0), true)
                        );
                    }
                });

                // Rewrite the object type id (and object data) to its 1.11.1 replacement, if one is registered.
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        Optional<EntityType1_12.ObjectType> type = ObjectType.findById(wrapper.get(Type.BYTE, 0));
                        if (type.isPresent()) {
                            Optional<EntityData> optEntDat = getObjectData(type.get());
                            if (optEntDat.isPresent()) {
                                EntityData data = optEntDat.get();
                                wrapper.set(Type.BYTE, 0, ((Integer) data.getReplacementId()).byteValue());
                                // -1 means "keep the original object data"
                                if (data.getObjectData() != -1)
                                    wrapper.set(Type.INT, 0, data.getObjectData());
                            }
                        } else {
                            if (Via.getManager().isDebug()) {
                                ViaBackwards.getPlatform().getLogger().warning("Could not find Entity Type" + wrapper.get(Type.BYTE, 0));
                            }
                        }
                    }
                });

                // Handle FallingBlock blocks
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        Optional<EntityType1_12.ObjectType> type = ObjectType.findById(wrapper.get(Type.BYTE, 0));
                        if (type.isPresent() && type.get().equals(ObjectType.FALLING_BLOCK)) {
                            // Object data packs block id in the low 12 bits and block data in bits 12-15.
                            int objectData = wrapper.get(Type.INT, 0);
                            int objType = objectData & 4095;
                            int data = objectData >> 12 & 15;

                            Block block = getProtocol().getBlockItemPackets().handleBlock(objType, data);
                            if (block == null)
                                return;

                            wrapper.set(Type.INT, 0, block.getId() | block.getData() << 12);
                        }
                    }
                });
            }
        });

        // Spawn Experience Orb
        protocol.registerOutgoing(State.PLAY, 0x01, 0x01, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity id

                // Track entity
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        // NOTE(review): the orb is tracked as THROWN_EXP_BOTTLE rather than an
                        // experience-orb type — verify this is intentional and not a copy/paste slip.
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                ObjectType.THROWN_EXP_BOTTLE.getType()
                        );
                    }
                });
            }
        });

        // Spawn Global Entity
        protocol.registerOutgoing(State.PLAY, 0x02, 0x02, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity ID
                map(Type.BYTE); // 1 - Type

                // Track entity
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                EntityType.WEATHER // Always thunder according to wiki.vg
                        );
                    }
                });
            }
        });

        // Spawn Mob
        protocol.registerOutgoing(State.PLAY, 0x03, 0x03, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity id
                map(Type.UUID); // 1 - UUID
                map(Type.VAR_INT); // 2 - Entity Type
                map(Type.DOUBLE); // 3 - X
                map(Type.DOUBLE); // 4 - Y
                map(Type.DOUBLE); // 5 - Z
                map(Type.BYTE); // 6 - Yaw
                map(Type.BYTE); // 7 - Pitch
                map(Type.BYTE); // 8 - Head Pitch
                map(Type.SHORT); // 9 - Velocity X
                map(Type.SHORT); // 10 - Velocity Y
                map(Type.SHORT); // 11 - Velocity Z
                map(Types1_12.METADATA_LIST); // 12 - Metadata

                // Track entity
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                getTypeFromId(wrapper.get(Type.VAR_INT, 1), false)
                        );
                    }
                });

                // Rewrite entity type / metadata
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        int entityId = wrapper.get(Type.VAR_INT, 0);
                        AbstractEntityType type = getEntityType(wrapper.user(), entityId);

                        MetaStorage storage = new MetaStorage(wrapper.get(Types1_12.METADATA_LIST, 0));
                        handleMeta(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                storage
                        );

                        // Swap in the replacement entity id and its default metadata, if registered.
                        Optional<EntityData> optEntDat = getEntityData(type);
                        if (optEntDat.isPresent()) {
                            EntityData data = optEntDat.get();
                            wrapper.set(Type.VAR_INT, 1, data.getReplacementId());
                            if (data.hasBaseMeta())
                                data.getDefaultMeta().handle(storage);
                        }

                        // Rewrite Metadata
                        wrapper.set(
                                Types1_12.METADATA_LIST,
                                0,
                                storage.getMetaDataList()
                        );
                    }
                });
            }
        });

        // Spawn Painting
        protocol.registerOutgoing(State.PLAY, 0x04, 0x04, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity ID

                // Track entity
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                EntityType.PAINTING
                        );
                    }
                });
            }
        });

        // Spawn Player
        protocol.registerOutgoing(State.PLAY, 0x05, 0x05, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity ID
                map(Type.UUID); // 1 - Player UUID
                map(Type.DOUBLE); // 2 - X
                map(Type.DOUBLE); // 3 - Y
                map(Type.DOUBLE); // 4 - Z
                map(Type.BYTE); // 5 - Yaw
                map(Type.BYTE); // 6 - Pitch
                map(Types1_12.METADATA_LIST); // 7 - Metadata list

                // Track Entity
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.VAR_INT, 0),
                                EntityType.PLAYER
                        );
                    }
                });

                // Rewrite Metadata
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        wrapper.set(
                                Types1_12.METADATA_LIST,
                                0,
                                handleMeta(
                                        wrapper.user(),
                                        wrapper.get(Type.VAR_INT, 0),
                                        new MetaStorage(wrapper.get(Types1_12.METADATA_LIST, 0))
                                ).getMetaDataList()
                        );
                    }
                });
            }
        });

        // Join game
        protocol.registerOutgoing(State.PLAY, 0x23, 0x23, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.INT); // 0 - Entity ID
                map(Type.UNSIGNED_BYTE); // 1 - Gamemode
                map(Type.INT); // 2 - Dimension

                // The joining player itself must also be tracked.
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        addTrackedEntity(
                                wrapper.user(),
                                wrapper.get(Type.INT, 0),
                                EntityType.PLAYER
                        );
                    }
                });

                // Remember the dimension for chunk/world handling elsewhere.
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        ClientWorld clientWorld = wrapper.user().get(ClientWorld.class);
                        int dimensionId = wrapper.get(Type.INT, 1);
                        clientWorld.setEnvironment(dimensionId);
                    }
                });
            }
        });

        // Respawn Packet (save dimension id)
        protocol.registerOutgoing(State.PLAY, 0x34, 0x33, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.INT); // 0 - Dimension ID

                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        ClientWorld clientWorld = wrapper.user().get(ClientWorld.class);
                        int dimensionId = wrapper.get(Type.INT, 0);
                        clientWorld.setEnvironment(dimensionId);
                    }
                });
            }
        });

        // Destroy entities
        protocol.registerOutgoing(State.PLAY, 0x31, 0x30, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT_ARRAY); // 0 - Entity IDS

                // Drop the destroyed entities from the tracker.
                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        for (int entity : wrapper.get(Type.VAR_INT_ARRAY, 0))
                            getEntityTracker(wrapper.user()).removeEntity(entity);
                    }
                });
            }
        });

        // Metadata packet
        protocol.registerOutgoing(State.PLAY, 0x3B, 0x39, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // 0 - Entity ID
                map(Types1_12.METADATA_LIST); // 1 - Metadata list

                handler(new PacketHandler() {
                    @Override
                    public void handle(PacketWrapper wrapper) throws Exception {
                        wrapper.set(
                                Types1_12.METADATA_LIST,
                                0,
                                handleMeta(
                                        wrapper.user(),
                                        wrapper.get(Type.VAR_INT, 0),
                                        new MetaStorage(wrapper.get(Types1_12.METADATA_LIST, 0))
                                ).getMetaDataList()
                        );
                    }
                });
            }
        });
    }

    @Override
    protected void registerRewrites() {
        // Parrots appear as bats; Illusioners as Evokers with a renamed display name.
        regEntType(EntityType.PARROT, EntityType.BAT).mobName("Parrot").spawnMetadata(storage -> storage.add(new Metadata(12, MetaType1_12.Byte, (byte) 0x00)));
        regEntType(EntityType.ILLUSION_ILLAGER, EntityType.EVOCATION_ILLAGER).mobName("Illusioner");

        // Handle Illager
        // TODO(review): the semantics of metadata index 12 are unclear — it appears to be an
        // "aggressive" bitmask; confirm before relying on it.
        registerMetaHandler().filter(EntityType.EVOCATION_ILLAGER, true, 12).removed();
        registerMetaHandler().filter(EntityType.EVOCATION_ILLAGER, true, 13).handleIndexChange(12);

        // Clear the 0x20 bit of the Illusioner's base flags before it reaches the older client.
        registerMetaHandler().filter(EntityType.ILLUSION_ILLAGER, 0).handle(e -> {
            byte mask = (byte) e.getData().getValue();

            if ((mask & 0x20) == 0x20)
                mask &= ~0x20;

            e.getData().setValue(mask);

            return e.getData();
        });

        // Create Parrot storage
        registerMetaHandler().filter(EntityType.PARROT, true).handle(e -> {
            if (!e.getEntity().has(ParrotStorage.class))
                e.getEntity().put(new ParrotStorage());
            return e.getData();
        });

        // Parrot remove animal metadata
        registerMetaHandler().filter(EntityType.PARROT, 12).removed(); // Is baby

        // Track the tamed flag; the metadata entry itself is removed for the 1.11.1 client.
        registerMetaHandler().filter(EntityType.PARROT, 13).handle(e -> {
            ParrotStorage storage = e.getEntity().get(ParrotStorage.class);
            boolean isTamed = (((byte) e.getData().getValue()) & 0x04) == 0x04;
            if (!storage.isTamed() && isTamed) {
                // TODO do something to let the user know it's done
            }
            storage.setTamed(isTamed);
            throw new RemovedValueException();
        }); // Flags (is sitting etc.; might be useful in the future)
        registerMetaHandler().filter(EntityType.PARROT, 14).removed(); // Owner
        registerMetaHandler().filter(EntityType.PARROT, 15).removed(); // Variant

        // Left shoulder entity data
        registerMetaHandler().filter(EntityType.PLAYER, 15).removed();
        // Right shoulder entity data
        registerMetaHandler().filter(EntityType.PLAYER, 16).removed();
    }
}
package hu.bme.mit.spaceship; import static org.junit.Assert.*; import static org.mockito.Mockito.*; import org.junit.Before; import org.junit.Test; public class GT4500Test { private GT4500 ship; @Before public void init(){ TorpedoStore primary = mock(TorpedoStore.class); TorpedoStore secondary = mock(TorpedoStore.class); this.ship = new GT4500(primary, secondary); } @Test public void fireTorpedos_Single_Success(){ // Arrange when(ship.fireTorpedos(FiringMode.SINGLE)).thenReturn(true); // Act boolean result = ship.fireTorpedos(FiringMode.SINGLE); // Assert verify(ship, times(1)).fireTorpedos(FiringMode.SINGLE); } @Test public void fireTorpedos_All_Success(){ // Arrange when(ship.fireTorpedos(FiringMode.ALL)).thenReturn(true); // Act boolean result = ship.fireTorpedos(FiringMode.ALL); // Assert verify(ship, times(1)).fireTorpedos(FiringMode.ALL); } }
package org.opendaylight.yangtools.yang.data.api;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static java.util.Objects.requireNonNull;

import com.google.common.annotations.Beta;
import com.google.common.base.VerifyException;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.Serializable;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;
import java.lang.reflect.Array;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import org.opendaylight.yangtools.concepts.Builder;
import org.opendaylight.yangtools.concepts.HierarchicalIdentifier;
import org.opendaylight.yangtools.concepts.Immutable;
import org.opendaylight.yangtools.util.HashCodeBuilder;
import org.opendaylight.yangtools.util.ImmutableOffsetMap;
import org.opendaylight.yangtools.util.SingletonSet;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.QNameModule;
import org.opendaylight.yangtools.yang.data.api.schema.LeafSetEntryNode;

// FIXME: 7.0.0: this concept needs to be moved to yang-common, as parser components need the ability to refer
// to data nodes -- most notably XPath expressions and {@code default} statement arguments need to be able
// to represent these.
/**
 * Hierarchical identifier of a node in the YANG data tree. Instances are immutable; identity is the
 * ordered list of {@link PathArgument}s, with the hash pre-computed at construction time.
 */
public abstract class YangInstanceIdentifier implements HierarchicalIdentifier<YangInstanceIdentifier> {
    private static final long serialVersionUID = 4L;
    // VarHandle used for racy-but-idempotent lazy initialization of toStringCache.
    private static final VarHandle TO_STRING_CACHE;

    static {
        try {
            TO_STRING_CACHE = MethodHandles.lookup().findVarHandle(YangInstanceIdentifier.class, "toStringCache",
                String.class);
        } catch (NoSuchFieldException | IllegalAccessException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    // Hash of all path arguments, computed once by the subclass constructors.
    private final int hash;
    // Lazily-populated toString() cache; accessed only through TO_STRING_CACHE.
    @SuppressWarnings("unused")
    private transient String toStringCache = null;

    // Package-private to prevent outside subclassing
    YangInstanceIdentifier(final int hash) {
        this.hash = hash;
    }

    /**
     * Return an empty {@link YangInstanceIdentifier}. It corresponds to the path of the conceptual root of the YANG
     * namespace.
     *
     * @return An empty YangInstanceIdentifier
     */
    public static @NonNull YangInstanceIdentifier empty() {
        return FixedYangInstanceIdentifier.EMPTY_INSTANCE;
    }

    abstract @NonNull YangInstanceIdentifier createRelativeIdentifier(int skipFromRoot);

    abstract @Nullable Collection<PathArgument> tryPathArguments();

    abstract @Nullable Collection<PathArgument> tryReversePathArguments();

    /**
     * Check if this instance identifier has empty path arguments, e.g. it is
     * empty and corresponds to {@link #empty()}.
     *
     * @return True if this instance identifier is empty, false otherwise.
     */
    public abstract boolean isEmpty();

    /**
     * Return an optimized version of this identifier, useful when the identifier
     * will be used very frequently.
     *
     * @return An optimized equivalent instance.
     */
    public abstract @NonNull YangInstanceIdentifier toOptimized();

    /**
     * Return the conceptual parent {@link YangInstanceIdentifier}, which has
     * one item less in {@link #getPathArguments()}.
     *
     * @return Parent {@link YangInstanceIdentifier}, or null if this object is {@link #empty()}.
     */
    public abstract @Nullable YangInstanceIdentifier getParent();

    /**
     * Return the conceptual parent {@link YangInstanceIdentifier}, which has one item less in
     * {@link #getPathArguments()}.
     *
     * @return Parent {@link YangInstanceIdentifier}
     * @throws VerifyException if this object is {@link #empty()}.
     */
    public abstract @NonNull YangInstanceIdentifier coerceParent();

    // NOTE(review): javadoc missing upstream in this view; presumably returns the ancestor identifier
    // retaining the first {@code depth} path arguments — confirm against the subclass implementations.
    public abstract @NonNull YangInstanceIdentifier getAncestor(int depth);

    /**
     * Returns an ordered iteration of path arguments.
     *
     * @return Immutable iteration of path arguments.
     */
    public abstract @NonNull List<PathArgument> getPathArguments();

    /**
     * Returns an iterable of path arguments in reverse order. This is useful
     * when walking up a tree organized this way.
     *
     * @return Immutable iterable of path arguments in reverse order.
     */
    public abstract @NonNull List<PathArgument> getReversePathArguments();

    /**
     * Returns the last PathArgument. This is equivalent of iterating
     * to the last element of the iterable returned by {@link #getPathArguments()}.
     *
     * @return The last path argument, or null if there are no PathArguments.
     */
    public abstract PathArgument getLastPathArgument();

    public static @NonNull YangInstanceIdentifier create(final Iterable<? extends PathArgument> path) {
        if (Iterables.isEmpty(path)) {
            return empty();
        }

        final HashCodeBuilder<PathArgument> hash = new HashCodeBuilder<>();
        for (PathArgument a : path) {
            hash.addArgument(a);
        }

        return FixedYangInstanceIdentifier.create(path, hash.build());
    }

    public static @NonNull YangInstanceIdentifier create(final PathArgument pathArgument) {
        return new FixedYangInstanceIdentifier(ImmutableList.of(pathArgument),
            HashCodeBuilder.nextHashCode(1, pathArgument));
    }

    public static @NonNull YangInstanceIdentifier create(final PathArgument... path) {
        // We are forcing a copy, since we cannot trust the user
        return create(Arrays.asList(path));
    }

    /**
     * Create a {@link YangInstanceIdentifier} by taking a snapshot of provided path and iterating it backwards.
     *
     * @param pathTowardsRoot Path towards root
     * @return A {@link YangInstanceIdentifier} instance
     * @throws NullPointerException if {@code pathTowardsRoot} or any of its members is null
     */
    public static @NonNull YangInstanceIdentifier createReverse(final Deque<PathArgument> pathTowardsRoot) {
        final ImmutableList.Builder<PathArgument> builder = ImmutableList.builderWithExpectedSize(
            pathTowardsRoot.size());
        pathTowardsRoot.descendingIterator().forEachRemaining(builder::add);
        return YangInstanceIdentifier.create(builder.build());
    }

    /**
     * Create a {@link YangInstanceIdentifier} by walking specified stack backwards and extracting path components
     * from it.
     *
     * @param stackTowardsRoot Stack towards root
     * @return A {@link YangInstanceIdentifier} instance
     * @throws NullPointerException if {@code stackTowardsRoot} is null
     */
    public static <T> @NonNull YangInstanceIdentifier createReverse(final Deque<? extends T> stackTowardsRoot,
            final Function<T, PathArgument> function) {
        final ImmutableList.Builder<PathArgument> builder = ImmutableList.builderWithExpectedSize(
            stackTowardsRoot.size());
        final Iterator<? extends T> it = stackTowardsRoot.descendingIterator();
        while (it.hasNext()) {
            builder.add(function.apply(it.next()));
        }
        return YangInstanceIdentifier.create(builder.build());
    }

    boolean pathArgumentsEqual(final YangInstanceIdentifier other) {
        return Iterables.elementsEqual(getPathArguments(), other.getPathArguments());
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof YangInstanceIdentifier)) {
            return false;
        }
        YangInstanceIdentifier other = (YangInstanceIdentifier) obj;
        // Cheap pre-check: the cached hashes must agree before the full argument lists are compared.
        if (this.hashCode() != obj.hashCode()) {
            return false;
        }

        return pathArgumentsEqual(other);
    }

    /**
     * Constructs a new Instance Identifier with new {@link NodeIdentifier} added to the end of path arguments.
     *
     * @param name QName of {@link NodeIdentifier}
     * @return Instance Identifier with additional path argument added to the end.
     */
    public final @NonNull YangInstanceIdentifier node(final QName name) {
        return node(new NodeIdentifier(name));
    }

    /**
     * Constructs a new Instance Identifier with new {@link PathArgument} added to the end of path arguments.
     *
     * @param arg Path argument which should be added to the end
     * @return Instance Identifier with additional path argument added to the end.
     */
    public final @NonNull YangInstanceIdentifier node(final PathArgument arg) {
        return new StackedYangInstanceIdentifier(this, arg, HashCodeBuilder.nextHashCode(hash, arg));
    }

    /**
     * Get the relative path from an ancestor. This method attempts to perform
     * the reverse of concatenating a base (ancestor) and a path.
     *
     * @param ancestor
     *            Ancestor against which the relative path should be calculated
     * @return This object's relative path from parent, or {@code Optional.empty()} if
     *         the specified parent is not in fact an ancestor of this object.
     */
    public Optional<YangInstanceIdentifier> relativeTo(final YangInstanceIdentifier ancestor) {
        if (this == ancestor) {
            return Optional.of(empty());
        }
        if (ancestor.isEmpty()) {
            return Optional.of(this);
        }

        final Iterator<PathArgument> lit = getPathArguments().iterator();
        final Iterator<PathArgument> oit = ancestor.getPathArguments().iterator();
        int common = 0;

        while (oit.hasNext()) {
            // Ancestor is not really an ancestor
            if (!lit.hasNext() || !lit.next().equals(oit.next())) {
                return Optional.empty();
            }

            ++common;
        }

        if (common == 0) {
            return Optional.of(this);
        }
        if (!lit.hasNext()) {
            return Optional.of(empty());
        }

        return Optional.of(createRelativeIdentifier(common));
    }

    @Override
    public final boolean contains(final YangInstanceIdentifier other) {
        if (this == other) {
            return true;
        }

        checkArgument(other != null, "other should not be null");
        final Iterator<PathArgument> lit = getPathArguments().iterator();
        final Iterator<PathArgument> oit = other.getPathArguments().iterator();

        while (lit.hasNext()) {
            if (!oit.hasNext()) {
                return false;
            }

            if (!lit.next().equals(oit.next())) {
                return false;
            }
        }

        return true;
    }

    @Override
    public final String toString() {
        /*
         * The toStringCache is safe, since the object contract requires
         * immutability of the object and all objects referenced from this
         * object.
         * Used lists, maps are immutable. Path Arguments (elements) are also
         * immutable, since the PathArgument contract requires immutability.
         * The cache is thread-safe - if multiple computations occur at the
         * same time, cache will be overwritten with same result.
         */
        final String ret = (String) TO_STRING_CACHE.getAcquire(this);
        return ret != null ? ret : loadToString();
    }

    private String loadToString() {
        final StringBuilder builder = new StringBuilder("/");
        PathArgument prev = null;
        for (PathArgument argument : getPathArguments()) {
            if (prev != null) {
                builder.append('/');
            }
            builder.append(argument.toRelativeString(prev));
            prev = argument;
        }

        final String ret = builder.toString();
        // Publish the computed string; if we lost the race, adopt the winner's (identical) value.
        final String witness = (String) TO_STRING_CACHE.compareAndExchangeRelease(this, null, ret);
        return witness == null ? ret : witness;
    }

    @Override
    public final int hashCode() {
        /*
         * The caching is safe, since the object contract requires
         * immutability of the object and all objects referenced from this
         * object.
         * Used lists, maps are immutable. Path Arguments (elements) are also
         * immutable, since the PathArgument contract requires immutability.
         */
        return hash;
    }

    // Deep hash helper used for predicate values, which may be arrays.
    @SuppressFBWarnings(value = "UPM_UNCALLED_PRIVATE_METHOD",
            justification = "https://github.com/spotbugs/spotbugs/issues/811")
    private static int hashCode(final Object value) {
        if (value == null) {
            return 0;
        }

        if (byte[].class.equals(value.getClass())) {
            return Arrays.hashCode((byte[]) value);
        }

        if (value.getClass().isArray()) {
            int hash = 0;
            int length = Array.getLength(value);
            for (int i = 0; i < length; i++) {
                hash += Objects.hashCode(Array.get(value, i));
            }

            return hash;
        }

        return Objects.hashCode(value);
    }

    // Serialization proxy for the enclosing identifier.
    final Object writeReplace() {
        return new YIDv1(this);
    }

    // Static factories & helpers

    /**
     * Returns a new InstanceIdentifier with only one path argument of type {@link NodeIdentifier} with supplied
     * QName.
     *
     * @param name QName of first node identifier
     * @return Instance Identifier with only one path argument of type {@link NodeIdentifier}
     */
    public static @NonNull YangInstanceIdentifier of(final QName name) {
        return create(new NodeIdentifier(name));
    }

    /**
     * Returns new builder for InstanceIdentifier with empty path arguments.
     *
     * @return new builder for InstanceIdentifier with empty path arguments.
     */
    public static @NonNull InstanceIdentifierBuilder builder() {
        return new YangInstanceIdentifierBuilder();
    }

    /**
     * Returns new builder for InstanceIdentifier with path arguments copied from original instance identifier.
     *
     * @param origin InstanceIdentifier from which path arguments are copied.
     * @return new builder for InstanceIdentifier with path arguments copied from original instance identifier.
     */
    public static @NonNull InstanceIdentifierBuilder builder(final YangInstanceIdentifier origin) {
        return new YangInstanceIdentifierBuilder(origin.getPathArguments(), origin.hashCode());
    }

    /**
     * Path argument / component of InstanceIdentifier.
     * Path argument uniquely identifies node in data tree on particular
     * level.
     *
     * <p>
     * This interface itself is used as common parent for actual
     * path arguments types and should not be implemented by user code.
     *
     * <p>
     * Path arguments SHOULD contain only minimum of information
     * required to uniquely identify node on particular subtree level.
     *
     * <p>
     * For actual path arguments types see:
     * <ul>
     * <li>{@link NodeIdentifier} - Identifier of container or leaf
     * <li>{@link NodeIdentifierWithPredicates} - Identifier of list entries, which have key defined
     * <li>{@link AugmentationIdentifier} - Identifier of augmentation
     * <li>{@link NodeWithValue} - Identifier of leaf-list entry
     * </ul>
     */
    public interface PathArgument extends Comparable<PathArgument>, Immutable, Serializable {
        /**
         * Returns unique QName of data node as defined in YANG Schema, if available.
         *
         * @return Node type
         * @throws UnsupportedOperationException if node type is not applicable, for example in case of an
         *         augmentation.
         */
        @NonNull QName getNodeType();

        /**
         * Return the string representation of this object for use in context
         * provided by a previous object. This method can be implemented in
         * terms of {@link #toString()}, but implementations are encouraged to
         * reuse any context already emitted by the previous object.
         *
         * @param previous Previous path argument
         * @return String representation
         */
        @NonNull String toRelativeString(PathArgument previous);
    }

    private abstract static class AbstractPathArgument implements PathArgument {
        private static final long serialVersionUID = -4546547994250849340L;
        private final @NonNull QName nodeType;
        // Lazily-computed hash; 0 means "not computed yet".
        private transient volatile int hashValue;

        protected AbstractPathArgument(final QName nodeType) {
            this.nodeType = requireNonNull(nodeType);
        }

        @Override
        public final QName getNodeType() {
            return nodeType;
        }

        @Override
        @SuppressWarnings("checkstyle:parameterName")
        public int compareTo(final PathArgument o) {
            return nodeType.compareTo(o.getNodeType());
        }

        protected int hashCodeImpl() {
            return nodeType.hashCode();
        }

        @Override
        public final int hashCode() {
            int local;
            return (local = hashValue) != 0 ? local : (hashValue = hashCodeImpl());
        }

        @Override
        public boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || this.getClass() != obj.getClass()) {
                return false;
            }

            return getNodeType().equals(((AbstractPathArgument)obj).getNodeType());
        }

        @Override
        public String toString() {
            return getNodeType().toString();
        }

        @Override
        public String toRelativeString(final PathArgument previous) {
            if (previous instanceof AbstractPathArgument) {
                // Elide the module prefix when it matches the previous argument's module.
                final QNameModule mod = previous.getNodeType().getModule();
                if (getNodeType().getModule().equals(mod)) {
                    return getNodeType().getLocalName();
                }
            }

            return getNodeType().toString();
        }

        abstract Object writeReplace();
    }

    /**
     * Simple path argument identifying a {@link org.opendaylight.yangtools.yang.data.api.schema.ContainerNode} or
     * {@link org.opendaylight.yangtools.yang.data.api.schema.LeafNode} leaf in particular subtree.
     */
    public static final class NodeIdentifier extends AbstractPathArgument {
        private static final long serialVersionUID = -2255888212390871347L;
        // Global weak-value cache backing create(QName).
        private static final LoadingCache<QName, NodeIdentifier> CACHE = CacheBuilder.newBuilder().weakValues()
                .build(new CacheLoader<QName, NodeIdentifier>() {
                    @Override
                    public NodeIdentifier load(final QName key) {
                        return new NodeIdentifier(key);
                    }
                });

        public NodeIdentifier(final QName node) {
            super(node);
        }

        /**
         * Return a NodeIdentifier for a particular QName. Unlike the constructor, this factory method uses a global
         * instance cache, resulting in object reuse for equal inputs.
         *
         * @param node Node's QName
         * @return A {@link NodeIdentifier}
         */
        public static @NonNull NodeIdentifier create(final QName node) {
            return CACHE.getUnchecked(node);
        }

        @Override
        Object writeReplace() {
            return new NIv1(this);
        }
    }

    /**
     * Composite path argument identifying a {@link org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode}
     * leaf overall data tree.
     */
    public abstract static class NodeIdentifierWithPredicates extends AbstractPathArgument {
        /**
         * Specialization for exactly one predicate key/value pair, avoiding a map allocation.
         */
        @Beta
        public static final class Singleton extends NodeIdentifierWithPredicates {
            private static final long serialVersionUID = 1L;

            private final @NonNull QName key;
            private final @NonNull Object value;

            Singleton(final QName node, final QName key, final Object value) {
                super(node);
                this.key = requireNonNull(key);
                this.value = requireNonNull(value);
            }

            @Override
            public SingletonSet<Entry<QName, Object>> entrySet() {
                return SingletonSet.of(singleEntry());
            }

            @Override
            public SingletonSet<QName> keySet() {
                return SingletonSet.of(key);
            }

            @Override
            public boolean containsKey(final QName qname) {
                return key.equals(requireNonNull(qname));
            }

            @Override
            public SingletonSet<Object> values() {
                return SingletonSet.of(value);
            }

            @Override
            public int size() {
                return 1;
            }

            @Override
            public ImmutableMap<QName, Object> asMap() {
                return ImmutableMap.of(key, value);
            }

            /**
             * Return the single entry contained in this object. This is equivalent to
             * {@code entrySet().iterator().next()}.
             *
             * @return A single entry.
             */
            public @NonNull Entry<QName, Object> singleEntry() {
                return new SimpleImmutableEntry<>(key, value);
            }

            @Override
            boolean equalMapping(final NodeIdentifierWithPredicates other) {
                final Singleton single = (Singleton) other;
                // deepEquals: predicate values may be arrays.
                return key.equals(single.key) && Objects.deepEquals(value, single.value);
            }

            @Override
            Object keyValue(final QName qname) {
                return key.equals(qname) ? value : null;
            }
        }

        // General case, backed by an immutable map of predicate key/value pairs.
        private static final class Regular extends NodeIdentifierWithPredicates {
            private static final long serialVersionUID = 1L;

            private final @NonNull Map<QName, Object> keyValues;

            Regular(final QName node, final Map<QName, Object> keyValues) {
                super(node);
                this.keyValues = requireNonNull(keyValues);
            }

            @Override
            public Set<Entry<QName, Object>> entrySet() {
                return keyValues.entrySet();
            }

            @Override
            public Set<QName> keySet() {
                return keyValues.keySet();
            }

            @Override
            public boolean containsKey(final QName qname) {
                return keyValues.containsKey(requireNonNull(qname));
            }

            @Override
            public Collection<Object> values() {
                return keyValues.values();
            }

            @Override
            public int size() {
                return keyValues.size();
            }

            @Override
            public Map<QName, Object> asMap() {
                return keyValues;
            }

            @Override
            Object keyValue(final QName qname) {
                return keyValues.get(qname);
            }

            @Override
            boolean equalMapping(final NodeIdentifierWithPredicates other) {
                final Map<QName, Object> otherKeyValues = ((Regular) other).keyValues;
                // TODO: benchmark to see if just calling equals() on the two maps is not faster
                if (keyValues == otherKeyValues) {
                    return true;
                }
                if (keyValues.size() != otherKeyValues.size()) {
                    return false;
                }

                for (Entry<QName, Object> entry : entrySet()) {
                    final Object otherValue = otherKeyValues.get(entry.getKey());
                    if (otherValue == null || !Objects.deepEquals(entry.getValue(), otherValue)) {
                        return false;
                    }
                }

                return true;
            }
        }

        private static final long serialVersionUID = -4787195606494761540L;

        NodeIdentifierWithPredicates(final QName node) {
            super(node);
        }

        public static @NonNull NodeIdentifierWithPredicates of(final QName node) {
            return new Regular(node, ImmutableMap.of());
        }

        public static @NonNull NodeIdentifierWithPredicates of(final QName node, final QName key,
                final Object value) {
            return new Singleton(node, key, value);
        }

        public static @NonNull NodeIdentifierWithPredicates of(final QName node, final Entry<QName, Object> entry) {
            return of(node, entry.getKey(), entry.getValue());
        }

        public static @NonNull NodeIdentifierWithPredicates of(final QName node, final Map<QName, Object> keyValues) {
            return keyValues.size() == 1 ? of(keyValues, node)
                    // Retains ImmutableMap for empty maps. For larger sizes uses a shared key set.
                    : new Regular(node, ImmutableOffsetMap.unorderedCopyOf(keyValues));
        }

        public static @NonNull NodeIdentifierWithPredicates of(final QName node,
                final ImmutableOffsetMap<QName, Object> keyValues) {
            return keyValues.size() == 1 ? of(keyValues, node) : new Regular(node, keyValues);
        }

        private static @NonNull NodeIdentifierWithPredicates of(final Map<QName, Object> keyValues,
                final QName node) {
            return of(node, keyValues.entrySet().iterator().next());
        }

        /**
         * Return the set of predicates keys and values. Keys are guaranteed to be unique.
         *
         * @return Predicate set.
         */
        public abstract @NonNull Set<Entry<QName, Object>> entrySet();

        /**
         * Return the predicate keys in the iteration order of {@link #entrySet()}.
         *
         * @return Predicate keys.
         */
        public abstract @NonNull Set<QName> keySet();

        /**
         * Determine whether a particular predicate key is present.
         *
         * @param key Predicate key
         * @return True if the predicate is present, false otherwise
         * @throws NullPointerException if {@code key} is null
         */
        public abstract boolean containsKey(QName key);

        /**
         * Return the predicate values in the iteration order of {@link #entrySet()}.
         *
         * @return Predicate values.
         */
        public abstract @NonNull Collection<Object> values();

        @Beta
        public final @Nullable Object getValue(final QName key) {
            return keyValue(requireNonNull(key));
        }

        @Beta
        public final <T> @Nullable T getValue(final QName key, final Class<T> valueClass) {
            return valueClass.cast(getValue(key));
        }

        /**
         * Return the number of predicates present.
         *
         * @return The number of predicates present.
         */
        public abstract int size();

        /**
         * A Map-like view of this identifier's predicates. The view is expected to be stable and
         * effectively-immutable.
         *
         * @return Map of predicates.
         */
        @Beta
        public abstract @NonNull Map<QName, Object> asMap();

        @Override
        protected final int hashCodeImpl() {
            int result = 31 * super.hashCodeImpl();
            for (Entry<QName, Object> entry : entrySet()) {
                // Order-independent accumulation; uses the deep hashCode(Object) helper for array values.
                result += entry.getKey().hashCode() + YangInstanceIdentifier.hashCode(entry.getValue());
            }
            return result;
        }

        @Override
        @SuppressWarnings("checkstyle:equalsHashCode")
        public final boolean equals(final Object obj) {
            // super.equals() guarantees obj is the same class, making the cast in equalMapping() safe.
            return super.equals(obj) && equalMapping((NodeIdentifierWithPredicates) obj);
        }

        abstract boolean equalMapping(NodeIdentifierWithPredicates other);

        abstract @Nullable Object keyValue(@NonNull QName qname);

        @Override
        public final String toString() {
            return super.toString() + '[' + asMap() + ']';
        }

        @Override
        public final String toRelativeString(final PathArgument previous) {
            return super.toRelativeString(previous) + '[' + asMap() + ']';
        }

        @Override
        final Object writeReplace() {
            return new NIPv2(this);
        }
    }

    /**
     * Simple path argument identifying a {@link LeafSetEntryNode} leaf
     * overall data tree.
*/
public static final class NodeWithValue<T> extends AbstractPathArgument {
    private static final long serialVersionUID = -3637456085341738431L;

    // The leaf-list entry value this argument identifies; non-null by construction.
    private final @NonNull T value;

    public NodeWithValue(final QName node, final T value) {
        super(node);
        this.value = requireNonNull(value);
    }

    public @NonNull T getValue() {
        return value;
    }

    @Override
    protected int hashCodeImpl() {
        return 31 * super.hashCodeImpl() + YangInstanceIdentifier.hashCode(value);
    }

    @Override
    @SuppressWarnings("checkstyle:equalsHashCode")
    public boolean equals(final Object obj) {
        if (!super.equals(obj)) {
            return false;
        }
        final NodeWithValue<?> other = (NodeWithValue<?>) obj;
        // deepEquals() because the value may be an array.
        return Objects.deepEquals(value, other.value);
    }

    @Override
    public String toString() {
        return super.toString() + '[' + value + ']';
    }

    @Override
    public String toRelativeString(final PathArgument previous) {
        return super.toRelativeString(previous) + '[' + value + ']';
    }

    @Override
    Object writeReplace() {
        // Serialization proxy pattern: serialize via NIVv1 instead of this object graph.
        return new NIVv1(this);
    }
}

public static final class AugmentationIdentifier implements PathArgument {
    private static final long serialVersionUID = -8122335594681936939L;

    // Global interning cache used by the create() factories; weak values allow unused
    // identifiers to be garbage-collected.
    private static final LoadingCache<ImmutableSet<QName>, AugmentationIdentifier> CACHE = CacheBuilder.newBuilder()
            .weakValues().build(new CacheLoader<ImmutableSet<QName>, AugmentationIdentifier>() {
                @Override
                public AugmentationIdentifier load(final ImmutableSet<QName> key) {
                    return new AugmentationIdentifier(key);
                }
            });

    private final @NonNull ImmutableSet<QName> childNames;

    @Override
    public QName getNodeType() {
        // This should rather throw exception than return always null
        throw new UnsupportedOperationException("Augmentation node has no QName");
    }

    /**
     * Construct new augmentation identifier using supplied set of possible
     * child nodes.
     *
     * @param childNames
     *            Set of possible child nodes.
     */
    public AugmentationIdentifier(final ImmutableSet<QName> childNames) {
        this.childNames = requireNonNull(childNames);
    }

    /**
     * Construct new augmentation identifier using supplied set of possible
     * child nodes.
     *
     * @param childNames
     *            Set of possible child nodes.
     */
    public AugmentationIdentifier(final Set<QName> childNames) {
        this.childNames = ImmutableSet.copyOf(childNames);
    }

    /**
     * Return an AugmentationIdentifier for a particular set of QNames. Unlike the constructor, this factory method
     * uses a global instance cache, resulting in object reuse for equal inputs.
     *
     * @param childNames Set of possible child nodes
     * @return An {@link AugmentationIdentifier}
     */
    public static @NonNull AugmentationIdentifier create(final ImmutableSet<QName> childNames) {
        return CACHE.getUnchecked(childNames);
    }

    /**
     * Return an AugmentationIdentifier for a particular set of QNames. Unlike the constructor, this factory method
     * uses a global instance cache, resulting in object reuse for equal inputs.
     *
     * @param childNames Set of possible child nodes
     * @return An {@link AugmentationIdentifier}
     */
    public static @NonNull AugmentationIdentifier create(final Set<QName> childNames) {
        // Fast path: probe the cache without copying; only copy to ImmutableSet on a miss.
        final AugmentationIdentifier existing = CACHE.getIfPresent(childNames);
        return existing != null ? existing : create(ImmutableSet.copyOf(childNames));
    }

    /**
     * Returns set of all possible child nodes.
     *
     * @return set of all possible child nodes.
     */
    public @NonNull Set<QName> getPossibleChildNames() {
        return childNames;
    }

    @Override
    public String toString() {
        return "AugmentationIdentifier{" + "childNames=" + childNames + '}';
    }

    @Override
    public String toRelativeString(final PathArgument previous) {
        return toString();
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof AugmentationIdentifier)) {
            return false;
        }
        AugmentationIdentifier that = (AugmentationIdentifier) obj;
        return childNames.equals(that.childNames);
    }

    @Override
    public int hashCode() {
        return childNames.hashCode();
    }

    @Override
    @SuppressWarnings("checkstyle:parameterName")
    public int compareTo(final PathArgument o) {
        // Non-AugmentationIdentifier arguments always sort after us.
        if (!(o instanceof AugmentationIdentifier)) {
            return -1;
        }
        AugmentationIdentifier other = (AugmentationIdentifier) o;
        Set<QName> otherChildNames = other.getPossibleChildNames();
        int thisSize = childNames.size();
        int otherSize = otherChildNames.size();
        if (thisSize == otherSize) {
            // Quick Set-based comparison
            if (childNames.equals(otherChildNames)) {
                return 0;
            }

            // We already know the sets are not equal, but have equal size, hence the sets differ in their elements,
            // but potentially share a common set of elements. The most consistent way of comparing them is using
            // total ordering defined by QName's compareTo. Hence convert both sets to lists ordered
            // by QName.compareTo() and decide on the first differing element.
            final List<QName> diff = new ArrayList<>(Sets.symmetricDifference(childNames, otherChildNames));
            verify(!diff.isEmpty(), "Augmentation identifiers %s and %s report no difference", this, o);
            diff.sort(QName::compareTo);
            return childNames.contains(diff.get(0)) ? -1 : 1;
        } else if (thisSize < otherSize) {
            return 1;
        } else {
            return -1;
        }
    }

    private Object writeReplace() {
        // Serialization proxy pattern: serialize via AIv1 instead of this object graph.
        return new AIv1(this);
    }
}

/**
 * Fluent Builder of Instance Identifier instances.
*/
public interface InstanceIdentifierBuilder extends Builder<YangInstanceIdentifier> {
    /**
     * Adds a {@link PathArgument} to path arguments of resulting instance identifier.
     *
     * @param arg A {@link PathArgument} to be added
     * @return this builder
     */
    @NonNull InstanceIdentifierBuilder node(PathArgument arg);

    /**
     * Adds {@link NodeIdentifier} with supplied QName to path arguments of resulting instance identifier.
     *
     * @param nodeType QName of {@link NodeIdentifier} which will be added
     * @return this builder
     */
    @NonNull InstanceIdentifierBuilder node(QName nodeType);

    /**
     * Adds {@link NodeIdentifierWithPredicates} with supplied QName and key values to path arguments of resulting
     * instance identifier.
     *
     * @param nodeType QName of {@link NodeIdentifierWithPredicates} which will be added
     * @param keyValues Map of key components and their respective values for {@link NodeIdentifierWithPredicates}
     * @return this builder
     */
    @NonNull InstanceIdentifierBuilder nodeWithKey(QName nodeType, Map<QName, Object> keyValues);

    /**
     * Adds {@link NodeIdentifierWithPredicates} with supplied QName and key, value.
     *
     * @param nodeType QName of {@link NodeIdentifierWithPredicates} which will be added
     * @param key QName of key which will be added
     * @param value value of key which will be added
     * @return this builder
     */
    @NonNull InstanceIdentifierBuilder nodeWithKey(QName nodeType, QName key, Object value);

    /**
     * Adds a collection of {@link PathArgument}s to path arguments of resulting instance identifier.
     *
     * @param args {@link PathArgument}s to be added
     * @return this builder
     * @throws NullPointerException if any of the arguments is null
     */
    @NonNull InstanceIdentifierBuilder append(Collection<? extends PathArgument> args);

    /**
     * Adds a collection of {@link PathArgument}s to path arguments of resulting instance identifier.
     *
     * @param args {@link PathArgument}s to be added
     * @return this builder
     * @throws NullPointerException if any of the arguments is null
     */
    default @NonNull InstanceIdentifierBuilder append(final PathArgument... args) {
        return append(Arrays.asList(args));
    }

    /**
     * Builds an {@link YangInstanceIdentifier} with path arguments from this builder.
     *
     * @return {@link YangInstanceIdentifier}
     */
    @Override
    YangInstanceIdentifier build();
}
}
package hudson.remoting;

import hudson.remoting.Channel.Mode;
import hudson.remoting.CommandTransport.CommandReceiver;
import org.junit.After;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
import java.util.HashSet;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Tests the effect of {@link ClassFilter}.
 *
 * <p>
 * This test code targets each of the known layers where object serialization is used.
 * Specifically, those are {@link ObjectInputStream} (and subtypes) created in:
 *
 * <ul>
 * <li>{@link Capability#read(InputStream)}
 * <li>{@link UserRequest#deserialize(Channel, byte[], ClassLoader)},
 * <li>{@link ChannelBuilder#makeTransport(InputStream, OutputStream, Mode, Capability)}
 * <li>{@link AbstractByteArrayCommandTransport#setup(Channel, CommandReceiver)} (TODO)
 * <li>{@link AbstractSynchronousByteArrayCommandTransport#read()}
 * </ul>
 *
 * @author Kohsuke Kawaguchi
 */
public class ClassFilterTest implements Serializable {
    /**
     * North can defend itself from south but not the other way around.
     */
    private transient InProcessRunner runner;
    private transient Channel north, south;

    /** Filter installed on the north side; rejects anything whose class name mentions "Security218". */
    private static class TestFilter extends ClassFilter {
        @Override
        protected boolean isBlacklisted(String name) {
            return name.contains("Security218");
        }
    }

    /**
     * Set up a channel pair where north side is well protected from south side but not the other way around.
     */
    private void setUp() throws Exception {
        setUp(new InProcessRunner() {
            @Override
            protected ChannelBuilder configureNorth() {
                return super.configureNorth()
                        .withClassFilter(new TestFilter());
            }
        });
    }

    /**
     * Set up a channel pair with no capacity. In the context of this test,
     * the lack of chunked encoding triggers a different transport implementation, and the lack of
     * multi-classloader support triggers {@link UserRequest} to select a different deserialization mechanism.
     */
    private void setUpWithNoCapacity() throws Exception {
        setUp(new InProcessRunner() {
            @Override
            protected ChannelBuilder configureNorth() {
                return super.configureNorth()
                        .withCapability(Capability.NONE)
                        .withClassFilter(new TestFilter());
            }

            @Override
            protected ChannelBuilder configureSouth() {
                return super.configureSouth().withCapability(Capability.NONE);
            }
        });
    }

    /** Common setup: starts the runner, wires both channel endpoints, and resets the attack trace. */
    private void setUp(InProcessRunner runner) throws Exception {
        this.runner = runner;
        north = runner.start();
        south = runner.south;
        ATTACKS.clear();
    }

    @After
    public void tearDown() throws Exception {
        if (runner!=null)
            runner.stop(north);
    }

    /**
     * Makes sure {@link Capability#read(InputStream)} rejects unexpected payload.
     */
    @Test
    public void capabilityRead() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(Mode.TEXT.wrap(baos));
        oos.writeObject(new Security218("rifle"));
        oos.close();

        try {
            Capability.read(new ByteArrayInputStream(baos.toByteArray()));
            // BUGFIX: without this fail(), the test silently passed even if read() did NOT reject the payload.
            fail("Capability.read should have rejected " + Security218.class.getName());
        } catch (SecurityException e) {
            assertEquals("Rejected: "+Security218.class.getName(), e.getMessage());
        }
    }

    /**
     * This test case targets object stream created in
     * {@link UserRequest#deserialize(Channel, byte[], ClassLoader)} with multiclassloader support.
     */
    @Test
    public void userRequest() throws Exception {
        setUp();
        userRequestTestSequence();
    }

    /**
     * Variant of {@link #userRequest()} test that targets
     * {@link UserRequest#deserialize(Channel, byte[], ClassLoader)} *without* multiclassloader support.
     */
    @Test
    public void userRequest_singleClassLoader() throws Exception {
        setUpWithNoCapacity();
        userRequestTestSequence();
    }

    private void userRequestTestSequence() throws Exception {
        // control case to prove that an attack will succeed to without filter.
        fire("caesar", north);
        assertTrue(ATTACKS.contains("caesar>south"));

        ATTACKS.clear();

        // the test case that should be rejected by a filter.
        try {
            fire("napoleon", south);
            fail("Expected call to fail");
        } catch (IOException e) {
            String msg = toString(e);
            assertTrue(msg, msg.contains("Rejected: " + Security218.class.getName()));
            assertTrue(ATTACKS.toString(), ATTACKS.isEmpty());
            assertFalse(ATTACKS.contains("napoleon>north"));
        }
    }

    /**
     * Sends an attack payload over {@link Channel#call(Callable)}
     */
    private void fire(String name, Channel from) throws Exception {
        final Security218 a = new Security218(name);
        from.call(new CallableBase<Void, IOException>() {
            @Override
            public Void call() throws IOException {
                a.toString(); // this will ensure 'a' gets sent over
                return null;
            }
        });
    }

    /**
     * This test case targets command stream created in
     * {@link AbstractSynchronousByteArrayCommandTransport#read()}, which is used
     * by {@link ChunkedCommandTransport}.
     */
    @Test
    public void AbstractSynchronousByteArrayCommandTransport_read() throws Exception {
        setUp();
        commandStreamTestSequence();
    }

    /**
     * This test case targets command stream created in
     * {@link ChannelBuilder#makeTransport(InputStream, OutputStream, Mode, Capability)}
     * by not having the chunking capability.
     */
    @Test
    public void ChannelBuilder_makeTransport() throws Exception {
        setUpWithNoCapacity();
        commandStreamTestSequence();
    }

    private void commandStreamTestSequence() throws Exception {
        // control case to prove that an attack will succeed to without filter.
        north.send(new Security218("eisenhower"));
        north.syncIO();    // any synchronous RPC call would do
        assertTrue(ATTACKS.contains("eisenhower>south"));

        ATTACKS.clear();

        // the test case that should be rejected by a filter
        try {
            south.send(new Security218("hitler"));
            north.syncIO();
            fail("the receiving end will abort after receiving Security218, so syncIO should fail");
        } catch (RequestAbortedException e) {
            String msg = toString(e);
            assertTrue(msg, msg.contains("Rejected: " + Security218.class.getName()));
            assertTrue(ATTACKS.toString(), ATTACKS.isEmpty());
            assertFalse(ATTACKS.contains("hitler>north"));
        }
    }

    /** Renders a throwable's full stack trace to a string for message assertions. */
    private String toString(Throwable t) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        t.printStackTrace(pw);
        return sw.toString();
    }

    /**
     * An attack payload that leaves a trace on the receiver side if it gets read from the stream.
     * Extends from {@link Command} to be able to test command stream.
     */
    static class Security218 extends Command implements Serializable {
        private final String attack;

        public Security218(String attack) {
            this.attack = attack;
        }

        // Records the attack in ATTACKS the moment this object is deserialized,
        // proving that the payload made it past the filter.
        private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
            ois.defaultReadObject();
            ATTACKS.add(attack + ">" + Channel.current().getName());
        }

        @Override
        protected void execute(Channel channel) {
            // nothing to do here
        }
    }

    /**
     * Successful attacks will leave a trace here.
     */
    static Set<String> ATTACKS = new HashSet<String>();
}
package org.talend.dataprep.transformation.service; import static com.jayway.restassured.RestAssured.given; import static com.jayway.restassured.http.ContentType.JSON; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.skyscreamer.jsonassert.JSONAssert.assertEquals; import org.apache.commons.io.IOUtils; import org.junit.Test; import org.talend.dataprep.transformation.Application; import com.jayway.restassured.path.json.JsonPath; /** * Integration tests on suggestions. */ public class SuggestionTests extends TransformationServiceBaseTests { @Test public void dataSetSuggest() throws Exception { // given final String dataSetMetadata = IOUtils .toString(Application.class.getResourceAsStream("suggestions/dataset_metadata.json")); // when final String response = given() .contentType(JSON) .body(dataSetMetadata) .when() .post("/suggest/dataset") .asString(); // then assertEquals("[]", response, false); } @Test public void emptyColumnSuggest() throws Exception { // when final String response = given() .contentType(JSON) .body("") .when() .post("/suggest/column") .asString(); // then assertEquals("[]", response, false); } @Test public void stringColumnSuggest() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/string_column.json")); final String expectedSuggestions = IOUtils .toString(Application.class.getResourceAsStream("suggestions/string_column_suggestions.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then assertEquals(expectedSuggestions, response, false); } @Test public void suggestLimit() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/date_column.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() 
.post("/suggest/column?limit=2") .asString(); // then final JsonPath json = JsonPath.from(response); assertThat(json.getList("").size(), is(2)); } @Test public void suggestLimitDefault() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/date_column.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then final JsonPath json = JsonPath.from(response); assertThat(json.getList("").size(), is(5)); // Default for "limit" is 5. } @Test public void floatColumnSuggest() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/float_column.json")); final String expectedSuggestions = IOUtils .toString(Application.class.getResourceAsStream("suggestions/float_column_suggestions.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then assertEquals(expectedSuggestions, response, false); } @Test public void integerColumnSuggest() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/integer_column.json")); final String expectedSuggestions = IOUtils .toString(Application.class.getResourceAsStream("suggestions/integer_column_suggestions.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then assertEquals(expectedSuggestions, response, false); } @Test public void booleanColumnSuggest() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/boolean_column.json")); final String expectedSuggestions = IOUtils .toString(Application.class.getResourceAsStream("suggestions/boolean_column_suggestions.json")); // when final String response = given() 
.contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then assertEquals(expectedSuggestions, response, false); } @Test public void dateColumnSuggest() throws Exception { // given final String columnMetadata = IOUtils.toString(Application.class.getResourceAsStream("suggestions/date_column.json")); final String expectedSuggestions = IOUtils .toString(Application.class.getResourceAsStream("suggestions/date_column_suggestions.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then assertEquals(expectedSuggestions, response, false); } @Test public void dateColumnSuggestWithStringType() throws Exception { // given final String columnMetadata = IOUtils .toString(Application.class.getResourceAsStream("suggestions/date_column_string_type.json")); final String expectedSuggestions = IOUtils .toString(Application.class.getResourceAsStream("suggestions/date_column_string_type_suggestions.json")); // when final String response = given() .contentType(JSON) .body(columnMetadata) .when() .post("/suggest/column") .asString(); // then assertEquals(expectedSuggestions, response, false); } }
package hudson.remoting;

import hudson.remoting.Channel.Mode;
import hudson.remoting.CommandTransport.CommandReceiver;
import org.jenkinsci.remoting.nio.NioChannelBuilder;
import org.junit.After;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
import java.util.HashSet;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Tests the effect of {@link ClassFilter}.
 *
 * <p>
 * This test code targets each of the known layers where object serialization is used.
 * Specifically, those are {@link ObjectInputStream} (and subtypes) created in:
 *
 * <ul>
 * <li>{@link Capability#read(InputStream)}
 * <li>{@link UserRequest#deserialize(Channel, byte[], ClassLoader)},
 * <li>{@link ChannelBuilder#makeTransport(InputStream, OutputStream, Mode, Capability)}
 * <li>{@link AbstractByteArrayCommandTransport#setup(Channel, CommandReceiver)}
 * <li>{@link AbstractSynchronousByteArrayCommandTransport#read()}
 * </ul>
 *
 * @author Kohsuke Kawaguchi
 */
public class ClassFilterTest implements Serializable {
    /**
     * North can defend itself from south but not the other way around.
     */
    private transient DualSideChannelRunner runner;
    private transient Channel north, south;

    /** Filter installed on the north side; rejects anything whose class name mentions "Security218". */
    private static class TestFilter extends ClassFilter {
        @Override
        protected boolean isBlacklisted(String name) {
            return name.contains("Security218");
        }
    }

    /**
     * Set up a channel pair where north side is well protected from south side but not the other way around.
     */
    private void setUp() throws Exception {
        setUp(new InProcessRunner() {
            @Override
            protected ChannelBuilder configureNorth() {
                return super.configureNorth()
                        .withClassFilter(new TestFilter());
            }
        });
    }

    /**
     * Set up a channel pair with no capacity. In the context of this test,
     * the lack of chunked encoding triggers a different transport implementation, and the lack of
     * multi-classloader support triggers {@link UserRequest} to select a different deserialization mechanism.
     */
    private void setUpWithNoCapacity() throws Exception {
        setUp(new InProcessRunner() {
            @Override
            protected ChannelBuilder configureNorth() {
                return super.configureNorth()
                        .withCapability(Capability.NONE)
                        .withClassFilter(new TestFilter());
            }

            @Override
            protected ChannelBuilder configureSouth() {
                return super.configureSouth().withCapability(Capability.NONE);
            }
        });
    }

    /** Common setup: starts the runner, wires both channel endpoints, and resets the attack trace. */
    private void setUp(DualSideChannelRunner runner) throws Exception {
        this.runner = runner;
        north = runner.start();
        south = runner.getOtherSide();
        ATTACKS.clear();
    }

    @After
    public void tearDown() throws Exception {
        if (runner!=null)
            runner.stop(north);
    }

    /**
     * Makes sure {@link Capability#read(InputStream)} rejects unexpected payload.
     */
    @Test
    public void capabilityRead() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(Mode.TEXT.wrap(baos));
        oos.writeObject(new Security218("rifle"));
        oos.close();

        try {
            Capability.read(new ByteArrayInputStream(baos.toByteArray()));
            // BUGFIX: without this fail(), the test silently passed even if read() did NOT reject the payload.
            fail("Capability.read should have rejected " + Security218.class.getName());
        } catch (SecurityException e) {
            assertEquals("Rejected: "+Security218.class.getName(), e.getMessage());
        }
    }

    /**
     * This test case targets object stream created in
     * {@link UserRequest#deserialize(Channel, byte[], ClassLoader)} with multiclassloader support.
     */
    @Test
    public void userRequest() throws Exception {
        setUp();
        userRequestTestSequence();
    }

    /**
     * Variant of {@link #userRequest()} test that targets
     * {@link UserRequest#deserialize(Channel, byte[], ClassLoader)} *without* multiclassloader support.
     */
    @Test
    public void userRequest_singleClassLoader() throws Exception {
        setUpWithNoCapacity();
        userRequestTestSequence();
    }

    private void userRequestTestSequence() throws Exception {
        // control case to prove that an attack will succeed to without filter.
        fire("caesar", north);
        assertTrue(ATTACKS.contains("caesar>south"));

        ATTACKS.clear();

        // the test case that should be rejected by a filter.
        try {
            fire("napoleon", south);
            fail("Expected call to fail");
        } catch (IOException e) {
            String msg = toString(e);
            assertTrue(msg, msg.contains("Rejected: " + Security218.class.getName()));
            assertTrue(ATTACKS.toString(), ATTACKS.isEmpty());
            assertFalse(ATTACKS.contains("napoleon>north"));
        }
    }

    /**
     * Sends an attack payload over {@link Channel#call(Callable)}
     */
    private void fire(String name, Channel from) throws Exception {
        final Security218 a = new Security218(name);
        from.call(new CallableBase<Void, IOException>() {
            @Override
            public Void call() throws IOException {
                a.toString(); // this will ensure 'a' gets sent over
                return null;
            }
        });
    }

    /**
     * This test case targets command stream created in
     * {@link AbstractSynchronousByteArrayCommandTransport#read()}, which is used
     * by {@link ChunkedCommandTransport}.
     */
    @Test
    public void transport_chunking() throws Exception {
        setUp();
        commandStreamTestSequence();
    }

    /**
     * This test case targets command stream created in
     * {@link ChannelBuilder#makeTransport(InputStream, OutputStream, Mode, Capability)}
     * by not having the chunking capability.
     */
    @Test
    public void transport_non_chunking() throws Exception {
        setUpWithNoCapacity();
        commandStreamTestSequence();
    }

    /**
     * This test case targets command stream created in
     * {@link AbstractByteArrayCommandTransport#setup(Channel, CommandReceiver)}
     */
    @Test
    public void transport_nio() throws Exception {
        setUp(new NioSocketRunner() {
            @Override
            protected NioChannelBuilder configureNorth() {
                return super.configureNorth()
                        .withClassFilter(new TestFilter());
            }
        });
        commandStreamTestSequence();
    }

    private void commandStreamTestSequence() throws Exception {
        // control case to prove that an attack will succeed to without filter.
        north.send(new Security218("eisenhower"));
        north.syncIO();    // any synchronous RPC call would do
        assertTrue(ATTACKS.contains("eisenhower>south"));

        ATTACKS.clear();

        // the test case that should be rejected by a filter
        try {
            south.send(new Security218("hitler"));
            north.syncIO();
            // transport_chunking hangs if this is 'south.syncIO', because somehow south
            // doesn't notice that the north has aborted and the connection is lost.
            // this is indicative of a larger problem, but one that's not related to
            // SECURITY-218 at hand, so I'm going to leave this with 'north.syncIO'
            // it still achieves the effect of blocking until the command is processed by north,
            // because the response from south back to north would have to come after Security218
            // command.

            // fail("the receiving end will abort after receiving Security218, so syncIO should fail");
            // ... except for NIO, which just discards that command and keeps on
            // } catch (RequestAbortedException e) {
            //     // other transport kills the connection
            //     String msg = toString(e);
            //     assertTrue(msg, msg.contains("Rejected: " + Security218.class.getName()));
        } catch (Exception e) {
            // Intentionally tolerated: NIO discards the rejected command and keeps going, while
            // other transports abort the connection; either outcome is acceptable here.
            e.printStackTrace();
        }

        // either way, the attack payload should have been discarded before it gets deserialized
        assertTrue(ATTACKS.toString(), ATTACKS.isEmpty());
        assertFalse(ATTACKS.contains("hitler>north"));
    }

    /** Renders a throwable's full stack trace to a string for message assertions. */
    private String toString(Throwable t) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        t.printStackTrace(pw);
        return sw.toString();
    }

    /**
     * An attack payload that leaves a trace on the receiver side if it gets read from the stream.
     * Extends from {@link Command} to be able to test command stream.
     */
    static class Security218 extends Command implements Serializable {
        private final String attack;

        public Security218(String attack) {
            this.attack = attack;
        }

        // Records the attack in ATTACKS the moment this object is deserialized,
        // proving that the payload made it past the filter.
        private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
            ois.defaultReadObject();
            ATTACKS.add(attack + ">" + Channel.current().getName());
        }

        @Override
        protected void execute(Channel channel) {
            // nothing to do here
        }
    }

    /**
     * Successful attacks will leave a trace here.
     */
    static Set<String> ATTACKS = new HashSet<String>();
}
//$HeadURL$
package org.deegree.tools.crs.georeferencing.application;

import java.awt.Rectangle;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;

import javax.swing.ButtonModel;
import javax.swing.JToggleButton;
import javax.vecmath.Point2d;

import org.deegree.commons.utils.Triple;
import org.deegree.cs.CRS;
import org.deegree.cs.exceptions.UnknownCRSException;
import org.deegree.geometry.GeometryFactory;
import org.deegree.rendering.r3d.model.geometry.GeometryQualityModel;
import org.deegree.rendering.r3d.model.geometry.SimpleAccessGeometry;
import org.deegree.rendering.r3d.opengl.display.OpenGLEventHandler;
import org.deegree.rendering.r3d.opengl.rendering.model.geometry.WorldRenderableObject;
import org.deegree.tools.crs.georeferencing.application.listeners.ButtonListener;
import org.deegree.tools.crs.georeferencing.application.listeners.Scene2DMouseListener;
import org.deegree.tools.crs.georeferencing.application.listeners.Scene2DMouseMotionListener;
import org.deegree.tools.crs.georeferencing.application.listeners.Scene2DMouseWheelListener;
import org.deegree.tools.crs.georeferencing.application.transformation.AbstractTransformation;
import org.deegree.tools.crs.georeferencing.application.transformation.AffineTransformation;
import org.deegree.tools.crs.georeferencing.application.transformation.Helmert4Transform;
import org.deegree.tools.crs.georeferencing.application.transformation.Polynomial;
import org.deegree.tools.crs.georeferencing.communication.PointTableFrame;
import org.deegree.tools.crs.georeferencing.communication.checkboxlist.CheckboxListTransformation;
import org.deegree.tools.crs.georeferencing.communication.dialog.coordinatejump.CoordinateJumperTextfieldDialog;
import org.deegree.tools.crs.georeferencing.communication.dialog.menuitem.OpenWMS;
import org.deegree.tools.crs.georeferencing.communication.dialog.menuitem.WMSParameterChooser;
import org.deegree.tools.crs.georeferencing.communication.dialog.option.GenericSettingsPanel;
import org.deegree.tools.crs.georeferencing.communication.dialog.option.NavigationPanel;
import org.deegree.tools.crs.georeferencing.communication.dialog.option.OptionDialog;
import org.deegree.tools.crs.georeferencing.communication.dialog.option.SettingsPanel;
import org.deegree.tools.crs.georeferencing.model.CheckBoxListModel;
import org.deegree.tools.crs.georeferencing.model.ControllerModel;
import org.deegree.tools.crs.georeferencing.model.Footprint;
import org.deegree.tools.crs.georeferencing.model.RowColumn;
import org.deegree.tools.crs.georeferencing.model.Scene2D;
import org.deegree.tools.crs.georeferencing.model.mouse.FootprintMouseModel;
import org.deegree.tools.crs.georeferencing.model.mouse.GeoReferencedMouseModel;
import org.deegree.tools.crs.georeferencing.model.points.Point4Values;
import org.deegree.tools.crs.georeferencing.model.points.PointResidual;
import org.deegree.tools.crs.georeferencing.model.textfield.CoordinateJumperModel;
import org.deegree.tools.rendering.viewer.File3dImporter;

/**
 * Mutable state holder of the georeferencing application: toggle-button state, the
 * 2D scenes (georeferenced map and footprint), the mapped point pairs with their
 * residuals, and the CRS/transformation configuration.
 *
 * @author <a href="mailto:schmitz@lat-lon.de">Andreas Schmitz</a>
 * @author last edited by: $Author$
 *
 * @version $Revision$, $Date$
 */
public class ApplicationState {

    public boolean isHorizontalRefGeoref, isHorizontalRefFoot, start, isControlDown, selectedGeoref, selectedFoot,
                    isZoomInGeoref, isZoomInFoot, isZoomOutGeoref, isZoomOutFoot, isInitGeoref, isInitFoot;

    public JToggleButton buttonZoomInGeoref, buttonZoominFoot, buttonZoomoutGeoref, buttonZoomoutFoot, buttonCoord,
                    buttonPanGeoref;

    public ButtonModel buttonModel;

    public Scene2D model;

    public Scene2DValues sceneValues;

    public PointTableFrame tablePanel;

    public ParameterStore store;

    public CoordinateJumperModel textFieldModel;

    public GeoReferencedMouseModel mouseGeoRef;

    public FootprintMouseModel mouseFootprint;

    public Point2d changePoint;

    // (footprint point, georeferenced point, residual) triples, in table-row order.
    public List<Triple<Point4Values, Point4Values, PointResidual>> mappedPoints;

    public ControllerModel conModel;

    public NavigationPanel optionNavPanel;

    public SettingsPanel optionSettPanel;

    public OptionDialog optionDialog;

    // private CoordinateJumperSpinnerDialog jumperDialog;
    public CoordinateJumperTextfieldDialog jumperDialog;

    public OpenWMS wmsStartDialog;

    public WMSParameterChooser wmsParameter;

    public GenericSettingsPanel optionSettingPanel;

    public JToggleButton buttonPanFoot;

    public CheckboxListTransformation checkBoxListTransform;

    public CheckBoxListModel modelTransformation;

    public RowColumn rc;

    public String chosenFile;

    public CRS sourceCRS, targetCRS;

    public Footprint footPrint;

    public OpenGLEventHandler glHandler;

    private GeometryFactory geom = new GeometryFactory();

    /**
     * Selects one navigation button and deselects the other so that the focus is just on this one button. The
     * georeferencing for the georeferenced map will be turned off in this case. <br>
     * If the button is selected already, that will be deselected and there is a horizontal referencing possible again.
     *
     * @param t
     *            the toggleButton that should be selected/deselected, not <Code>null</Code>.
     */
    public void selectGeorefToggleButton( JToggleButton t ) {
        boolean checkSelected = false;
        buttonModel = t.getModel();
        selectedGeoref = buttonModel.isSelected();
        if ( !selectedGeoref ) {
            isHorizontalRefGeoref = true;
        } else {
            // Deselect every sibling so that at most one navigation mode is active.
            checkSelected = true;
            buttonPanGeoref.setSelected( false );
            buttonZoomInGeoref.setSelected( false );
            buttonZoomoutGeoref.setSelected( false );
            buttonCoord.setSelected( false );
            isHorizontalRefGeoref = false;
        }
        if ( t == buttonPanGeoref ) {
            buttonPanGeoref.setSelected( checkSelected );
        } else if ( t == buttonZoomInGeoref ) {
            buttonZoomInGeoref.setSelected( checkSelected );
        } else if ( t == buttonZoomoutGeoref ) {
            buttonZoomoutGeoref.setSelected( checkSelected );
        } else if ( t == buttonCoord ) {
            buttonCoord.setSelected( checkSelected );
            if ( checkSelected ) {
                // jumperDialog = new CoordinateJumperSpinnerDialog( view );
                jumperDialog = new CoordinateJumperTextfieldDialog( conModel.getView() );
                jumperDialog.getCoordinateJumper().setToolTipText( textFieldModel.getTooltipText() );
                jumperDialog.addListeners( new ButtonListener( this ) );
                jumperDialog.setVisible( true );
            }
        }
    }

    /**
     * Selects one navigation button and deselects the other so that the focus is just on this one button. The
     * georeferencing for the footprint view will be turned off in this case. <br>
     * If the button is selected already, that will be deselected and there is a horizontal referencing possible again.
     *
     * @param t
     *            the toggleButton that should be selected/deselected, not <Code>null</Code>.
     */
    public void selectFootprintToggleButton( JToggleButton t ) {
        boolean checkSelected = false;
        buttonModel = t.getModel();
        selectedFoot = buttonModel.isSelected();
        if ( !selectedFoot ) {
            isHorizontalRefFoot = true;
        } else {
            checkSelected = true;
            buttonPanFoot.setSelected( false );
            buttonZoominFoot.setSelected( false );
            buttonZoomoutFoot.setSelected( false );
            isHorizontalRefFoot = false;
        }
        if ( t == buttonPanFoot ) {
            buttonPanFoot.setSelected( checkSelected );
        } else if ( t == buttonZoominFoot ) {
            buttonZoominFoot.setSelected( checkSelected );
        } else if ( t == buttonZoomoutFoot ) {
            buttonZoomoutFoot.setSelected( checkSelected );
        }
    }

    /**
     * Removes sample points in panels and the table.
     *
     * @param tableRows
     *            that should be removed, could be <Code>null</Code>
     */
    public void removeFromMappedPoints( int[] tableRows ) {
        // Iterate backwards so earlier removals don't shift the indices still to be removed.
        // FIX(review): the loop update clause was garbled in the original source
        // ("i mappedPoints.remove..."); restored to "i--" with a braced body.
        for ( int i = tableRows.length - 1; i >= 0; i-- ) {
            mappedPoints.remove( tableRows[i] );
        }
    }

    /**
     * Initializes the georeferenced scene.
     */
    public void initGeoReferencingScene( Scene2D scene2d ) {
        isInitGeoref = true;
        // Save/load only makes sense once both the georeferenced and footprint scenes exist.
        if ( isInitFoot ) {
            tablePanel.getSaveButton().setEnabled( true );
            tablePanel.getLoadButton().setEnabled( true );
        }
        mouseGeoRef = new GeoReferencedMouseModel();
        scene2d.init( sceneValues );
        targetCRS = scene2d.getCRS();
        init();
        // Replace any previously registered listeners to avoid duplicate event delivery.
        Controller.removeListeners( conModel.getPanel() );
        conModel.getPanel().addScene2DMouseListener( new Scene2DMouseListener( this ) );
        conModel.getPanel().addScene2DMouseMotionListener( new Scene2DMouseMotionListener( this ) );
        conModel.getPanel().addScene2DMouseWheelListener( new Scene2DMouseWheelListener( this ) );
    }

    /**
     * Initializes the computing and the painting of the maps.
     */
    void init() {
        if ( model != null ) {
            sceneValues.setGeorefDimension( new Rectangle( conModel.getPanel().getWidth(),
                                                           conModel.getPanel().getHeight() ) );
            conModel.getPanel().setImageDimension( sceneValues.getGeorefDimension() );
            conModel.getPanel().setImageToDraw( model.generateSubImage( sceneValues.getGeorefDimension() ) );
            conModel.getPanel().updatePoints( sceneValues );
            conModel.getPanel().repaint();
        }
    }

    /**
     * Initializes the footprint scene.
     */
    public void initFootprintScene( String filePath ) {
        isInitFoot = true;
        if ( isInitGeoref ) {
            tablePanel.getSaveButton().setEnabled( true );
            tablePanel.getLoadButton().setEnabled( true );
        }
        this.footPrint = new Footprint( sceneValues, geom );
        Controller.removeListeners( conModel.getFootPanel() );
        conModel.getFootPanel().addScene2DMouseListener( new Scene2DMouseListener( this ) );
        conModel.getFootPanel().addScene2DMouseMotionListener( new Scene2DMouseMotionListener( this ) );
        conModel.getFootPanel().addScene2DMouseWheelListener( new Scene2DMouseWheelListener( this ) );
        mouseFootprint = new FootprintMouseModel();
        List<WorldRenderableObject> rese = File3dImporter.open( conModel.getView(), filePath );
        sourceCRS = null;
        for ( WorldRenderableObject res : rese ) {
            sourceCRS = res.getBbox().getCoordinateSystem();
            glHandler.addDataObjectToScene( res );
        }
        // For each quality model, keep only the horizontal geometry with the minimal z value.
        List<float[]> geometryThatIsTaken = new ArrayList<float[]>();
        for ( GeometryQualityModel g : File3dImporter.gm ) {
            ArrayList<SimpleAccessGeometry> h = g.getQualityModelParts();
            boolean isfirstOccurrence = false;
            float minimalZ = 0;
            for ( SimpleAccessGeometry b : h ) {
                float[] a = b.getHorizontalGeometries( b.getGeometry() );
                if ( a != null ) {
                    if ( !isfirstOccurrence ) {
                        minimalZ = a[2];
                        geometryThatIsTaken.add( a );
                        isfirstOccurrence = true;
                    } else if ( minimalZ >= a[2] ) {
                        // Lower (or equal) geometry found: replace the previous candidate.
                        geometryThatIsTaken.remove( geometryThatIsTaken.size() - 1 );
                        minimalZ = a[2];
                        geometryThatIsTaken.add( a );
                    }
                }
            }
        }
        footPrint.generateFootprints( geometryThatIsTaken );
        sceneValues.setDimensionFootpanel( new Rectangle( conModel.getFootPanel().getBounds().width,
                                                          conModel.getFootPanel().getBounds().height ) );
        conModel.getFootPanel().updatePoints( sceneValues );
        conModel.getFootPanel().setPolygonList( footPrint.getWorldCoordinateRingList(), sceneValues );
        conModel.getFootPanel().repaint();
    }

    /**
     * Recomputes the residuals, temporarily including the not-yet-committed last point pair
     * of both panels if one exists in each.
     */
    public void updateResidualsWithLastAbstractPoint() {
        if ( conModel.getFootPanel().getLastAbstractPoint() != null
             && conModel.getPanel().getLastAbstractPoint() != null ) {
            mappedPoints.add( new Triple<Point4Values, Point4Values, PointResidual>(
                                    conModel.getFootPanel().getLastAbstractPoint(),
                                    conModel.getPanel().getLastAbstractPoint(), null ) );
            updateMappedPoints();
            updateResiduals( conModel.getTransformationType() );

            // remove the last element...should be the before inserted value
            mappedPoints.remove( mappedPoints.size() - 1 );
        } else {
            updateMappedPoints();
            updateResiduals( conModel.getTransformationType() );
        }
    }

    /**
     * Adds the <Code>AbstractPoint</Code>s to a map, if specified.
     *
     * @param mappedPointKey
     * @param mappedPointValue
     */
    void addToMappedPoints( Point4Values mappedPointKey, Point4Values mappedPointValue, PointResidual residual ) {
        if ( mappedPointKey != null && mappedPointValue != null ) {
            this.mappedPoints.add( new Triple<Point4Values, Point4Values, PointResidual>( mappedPointKey,
                                                                                          mappedPointValue, residual ) );
        }
    }

    /**
     * Updates the rowNumber of the remained mappedPoints
     */
    private void updateMappedPoints() {
        List<Triple<Point4Values, Point4Values, PointResidual>> temp = new ArrayList<Triple<Point4Values, Point4Values, PointResidual>>();
        int counter = 0;
        for ( Triple<Point4Values, Point4Values, PointResidual> p : mappedPoints ) {
            System.out.println( "[Controller] before: " + p );
            // Rebuild both points so their RowColumn reflects the new (compacted) row index.
            Point4Values f = new Point4Values( p.first.getOldValue(), p.first.getInitialValue(), p.first.getNewValue(),
                                               p.first.getWorldCoords(),
                                               new RowColumn( counter, p.first.getRc().getColumnX(),
                                                              p.first.getRc().getColumnY() ) );
            Point4Values s = new Point4Values( p.second.getOldValue(), p.second.getInitialValue(),
                                               p.second.getNewValue(), p.second.getWorldCoords(),
                                               new RowColumn( counter++, p.second.getRc().getColumnX(),
                                                              p.second.getRc().getColumnY() ) );
            if ( p.third != null ) {
                PointResidual r = new PointResidual( p.third.x, p.third.y );
                System.out.println( "\n[Controller] after: " + s );
                temp.add( new Triple<Point4Values, Point4Values, PointResidual>( f, s, r ) );
            } else {
                temp.add( new Triple<Point4Values, Point4Values, PointResidual>( f, s, null ) );
            }
        }
        mappedPoints.clear();
        mappedPoints.addAll( temp );
    }

    /**
     * Updates the model of the table to show the residuals of the already stored mappedPoints. It is based on the
     * Helmert transformation.
     *
     * @param type
     *
     */
    public void updateResiduals( AbstractTransformation.TransformationType type ) {
        try {
            AbstractTransformation t = determineTransformationType( type );
            PointResidual[] r = t.calculateResiduals();
            if ( r != null ) {
                // Table rows: georef x/y, footprint x/y, residual x/y.
                Vector<Vector<? extends Double>> data = new Vector<Vector<? extends Double>>();
                int counter = 0;
                for ( Triple<Point4Values, Point4Values, PointResidual> point : mappedPoints ) {
                    Vector<Double> element = new Vector<Double>( 6 );
                    element.add( point.second.getWorldCoords().x );
                    element.add( point.second.getWorldCoords().y );
                    element.add( point.first.getWorldCoords().x );
                    element.add( point.first.getWorldCoords().y );
                    element.add( r[counter].x );
                    element.add( r[counter].y );
                    data.add( element );
                    point.third = r[counter++];
                }
                tablePanel.getModel().setDataVector( data, tablePanel.getColumnNamesAsVector() );
                tablePanel.getModel().fireTableDataChanged();
            }
        } catch ( UnknownCRSException e ) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    /**
     * Removes everything after a complete deletion of the points.
     */
    public void removeAllFromMappedPoints() {
        mappedPoints = new ArrayList<Triple<Point4Values, Point4Values, PointResidual>>();
        tablePanel.removeAllRows();
        conModel.getPanel().removeAllFromSelectedPoints();
        conModel.getFootPanel().removeAllFromSelectedPoints();
        conModel.getFootPanel().setLastAbstractPoint( null, null, null );
        conModel.getPanel().setPolygonList( null, null );
        conModel.getPanel().setLastAbstractPoint( null, null, null );
        conModel.getPanel().repaint();
        conModel.getFootPanel().repaint();
        reset();
    }

    /**
     * Resets the focus of the panels and the startPanel.
     */
    public void reset() {
        conModel.getPanel().setFocus( false );
        conModel.getFootPanel().setFocus( false );
        start = false;
    }

    /**
     * Determines the transformationMethod by means of the type.
     *
     * @param type
     *            of the transformationMethod, not <Code>null</Code>.
     * @return the transformationMethod to be used.
     * @throws UnknownCRSException
     */
    public AbstractTransformation determineTransformationType( AbstractTransformation.TransformationType type )
                            throws UnknownCRSException {
        AbstractTransformation t = null;
        // NOTE(review): Polynomial and Affine pass targetCRS for both CRS arguments;
        // if one of them is meant to be sourceCRS this is a latent bug - confirm against
        // the constructors of Polynomial/AffineTransformation.
        switch ( type ) {
        case Polynomial:
            t = new Polynomial( mappedPoints, footPrint, sceneValues, targetCRS, targetCRS, conModel.getOrder() );
            break;
        case Helmert_4:
            t = new Helmert4Transform( mappedPoints, footPrint, sceneValues, targetCRS, conModel.getOrder() );
            break;
        case Affine:
            t = new AffineTransformation( mappedPoints, footPrint, sceneValues, targetCRS, targetCRS,
                                          conModel.getOrder() );
            break;
        }
        return t;
    }

    /**
     * Updates the panels that are responsible for drawing the georeferenced points so that the once clicked points are
     * drawn into the right position.
     */
    public void updateDrawingPanels() {
        List<Point4Values> panelList = new ArrayList<Point4Values>();
        List<Point4Values> footPanelList = new ArrayList<Point4Values>();
        for ( Triple<Point4Values, Point4Values, PointResidual> p : mappedPoints ) {
            panelList.add( p.second );
            footPanelList.add( p.first );
        }
        conModel.getPanel().setSelectedPoints( panelList, sceneValues );
        conModel.getFootPanel().setSelectedPoints( footPanelList, sceneValues );
        conModel.getPanel().repaint();
        conModel.getFootPanel().repaint();
    }

    /**
     * Sets values to the JTableModel.
     */
    public void setValues() {
        conModel.getFootPanel().addToSelectedPoints( conModel.getFootPanel().getLastAbstractPoint() );
        conModel.getPanel().addToSelectedPoints( conModel.getPanel().getLastAbstractPoint() );
        if ( mappedPoints != null && mappedPoints.size() >= 1 ) {
            addToMappedPoints( conModel.getFootPanel().getLastAbstractPoint(),
                               conModel.getPanel().getLastAbstractPoint(), null );
            updateResiduals( conModel.getTransformationType() );
        } else {
            addToMappedPoints( conModel.getFootPanel().getLastAbstractPoint(),
                               conModel.getPanel().getLastAbstractPoint(), null );
        }
        conModel.getFootPanel().setLastAbstractPoint( null, null, null );
        conModel.getPanel().setLastAbstractPoint( null, null, null );
    }
}
package javaslang.concurrent; import javaslang.control.None; import javaslang.control.Try; import org.assertj.core.api.Assertions; import org.junit.Test; import static org.assertj.core.api.StrictAssertions.assertThat; public class FutureTest { @Test public void shouldInterruptLockedFuture() { final Future<?> future = Future.of(() -> { final Object lock = new Object(); synchronized (lock) { lock.wait(); } return null; }); future.onComplete(r -> Assertions.fail("future should lock forever")); int count = 0; while (!future.isCompleted() && !future.isCancelled()) { Try.run(() -> Thread.sleep(100)); if (++count > 3) { future.cancel(); } } assertCancelled(future); } // checks the invariant for cancelled state void assertCancelled(Future<?> future) { assertThat(future.isCancelled()).isTrue(); assertThat(future.isCompleted()).isFalse(); assertThat(future.getValue()).isEqualTo(None.instance()); } }
package org.deeplearning4j.datasets.iterator.impl;

import org.datavec.image.loader.CifarLoader;
import org.datavec.image.transform.ImageTransform;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.nd4j.linalg.dataset.DataSet;

import java.util.List;

/**
 * DataSetIterator over the CIFAR image set, backed by {@link CifarLoader}.
 *
 * NOTE(review): several fields below are static but are (re)assigned through
 * {@code this.} in the constructor (loader, preProcessCifar, train). Two iterators
 * created with different settings therefore share — and clobber — each other's
 * loader/flags. They cannot simply be made instance fields because the delegating
 * constructors reference them in {@code this(...)} argument lists, which is only
 * legal for static fields. Flagging rather than changing, since the
 * {@code protected static} surface is visible to subclasses.
 */
public class CifarDataSetIterator extends RecordReaderDataSetIterator {

    // Default CIFAR image dimensions (32x32 RGB).
    protected static int height = 32;
    protected static int width = 32;
    protected static int channels = 3;
    protected static CifarLoader loader;
    protected int totalExamples = CifarLoader.NUM_TRAIN_IMAGES;
    // TODO use maxNumBatches and batchNum instead
    protected int numExamples = totalExamples;
    protected int exampleCount = 0;
    // Set once the loader returns null or maxNumBatches is reached; ends iteration.
    protected boolean overshot = false;
    protected ImageTransform imageTransform;
    protected static boolean preProcessCifar = false;
    protected static boolean train = true;

    /**
     * Loads images with given batchSize, numExamples, & version returned by the generator.
     */
    public CifarDataSetIterator(int batchSize, int numExamples, boolean train) {
        this(batchSize, numExamples, new int[]{height, width, channels}, CifarLoader.NUM_LABELS, null, preProcessCifar, train);
    }

    /**
     * Loads images with given batchSize, numExamples, & imgDim returned by the generator.
     */
    public CifarDataSetIterator(int batchSize, int numExamples, int[] imgDim) {
        this(batchSize, numExamples, imgDim, CifarLoader.NUM_LABELS, null, preProcessCifar, train);
    }

    /**
     * Loads images with given batchSize, numExamples, imgDim & version returned by the generator.
     */
    public CifarDataSetIterator(int batchSize, int numExamples, int[] imgDim, boolean train) {
        this(batchSize, numExamples, imgDim, CifarLoader.NUM_LABELS, null, preProcessCifar, train);
    }

    /**
     * Loads images with given batchSize & numExamples returned by the generator.
     */
    public CifarDataSetIterator(int batchSize, int numExamples) {
        this(batchSize, numExamples, new int[]{height, width, channels}, CifarLoader.NUM_LABELS, null, preProcessCifar, train);
    }

    /**
     * Loads images with given batchSize & imgDim returned by the generator.
     */
    public CifarDataSetIterator(int batchSize, int[] imgDim) {
        this(batchSize, CifarLoader.NUM_TRAIN_IMAGES, imgDim, CifarLoader.NUM_LABELS, null, preProcessCifar, train);
    }

    /**
     * Loads images with given batchSize, numExamples, imgDim & version returned by the generator.
     */
    public CifarDataSetIterator(int batchSize, int numExamples, int[] imgDim, boolean preProcessCifar, boolean train) {
        this(batchSize, numExamples, imgDim, CifarLoader.NUM_LABELS, null, preProcessCifar, train);
    }

    /**
     * Create Cifar data specific iterator
     *
     * @param batchSize      the batch size of the examples
     * @param imgDim         an array of height, width and channels
     * @param numExamples    the overall number of examples
     * @param imageTransform the transformation to apply to the images
     * @param preProcessCifar preprocess cifar
     * @param train          true if use training set and false for test
     */
    public CifarDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numPossibleLables,
                                ImageTransform imageTransform, boolean preProcessCifar, boolean train) {
        super(null, batchSize, 1, numExamples);
        // NOTE(review): assigns shared static state via 'this.' — see class comment.
        this.loader = new CifarLoader(imgDim[0], imgDim[1], imgDim[2], imageTransform, train, preProcessCifar);
        // Training default is NUM_TRAIN_IMAGES (field initializer); test set overrides it.
        this.totalExamples = train ? totalExamples : CifarLoader.NUM_TEST_IMAGES;
        // Clamp the requested example count to what the chosen set actually contains.
        this.numExamples = numExamples > totalExamples ? totalExamples : numExamples;
        this.numPossibleLabels = numPossibleLables;
        this.preProcessCifar = preProcessCifar;
        this.imageTransform = imageTransform;
        this.train = train;
    }

    // TODO add transform - random flip when loading batches
    @Override
    public DataSet next(int batchSize) {
        // useCurrent/last come from RecordReaderDataSetIterator: replay the cached batch once.
        if(useCurrent) {
            useCurrent = false;
            return last;
        }
        DataSet result;
        // NOTE(review): 'i' is reset to 1 every call, so the preProcess branch only ever
        // passes 1 or 2 as the file index — presumably meant to advance across
        // TRAINFILENAMES; confirm against CifarLoader.next(int, int, int).
        int i = 1;
        if (preProcessCifar) {
            if (train && batchNum == 0 && i <= CifarLoader.TRAINFILENAMES.length)
                i++;
            result = loader.next(batchSize, i, batchNum);
        } else
            result = loader.next(batchSize);
        exampleCount += batchSize;
        batchNum++;
        // Loader exhausted or batch budget spent: mark overshot and replay the last batch.
        if(result == null || (maxNumBatches > -1 && batchNum >= maxNumBatches)) {
            overshot = true;
            return last;
        }
        if(preProcessor != null) preProcessor.preProcess(result);
        last = result;
        if ( loader.getLabels() != null) result.setLabelNames(loader.getLabels());
        return result;
    }

    @Override
    public boolean hasNext() {
        // More examples remain, the batch budget (if any) isn't spent, and the loader
        // hasn't already signalled exhaustion.
        return exampleCount < numExamples && (maxNumBatches == -1 || batchNum < maxNumBatches) && !overshot;
    }

    @Override
    public int totalExamples() {
        return totalExamples;
    }

    @Override
    public void reset() {
        exampleCount = 0;
        overshot = false;
        batchNum = 0;
        loader.reset();
    }

    @Override
    public List<String> getLabels(){
        return loader.getLabels();
    }
}
package leshan.interop;

import static org.junit.Assert.*;

import java.util.HashMap;

import leshan.client.californium.impl.CaliforniumClientIdentifier;
import leshan.client.request.DeregisterRequest;
import leshan.client.request.RegisterRequest;
import leshan.client.response.OperationResponse;
import leshan.server.client.Client;

import org.eclipse.californium.core.coap.CoAP.ResponseCode;
import org.junit.Test;

/**
 * LWM2M interop tests for the registration interface (tests 101 and 103), run
 * against the client/server pair provided by {@code BaseTest}.
 */
public class RegistrationTest extends BaseTest {

    @Test
    public void LightweightM2M_1_0_int_101_Initial_Registration() {
        // FIX(review): the banner literal was truncated (unterminated string) in the
        // original source; reconstructed from the test name - confirm wording.
        System.out.println("--- LightweightM2M-1.0-int-101: Initial Registration ---");

        // client registration
        OperationResponse response = client.send(new RegisterRequest(clientEndpoint, new HashMap<String, String>()));
        System.out.println("Registration result: " + response);

        // verify registration on server side
        Client regClient = server.getClientRegistry().get(clientEndpoint);
        assertNotNull(regClient);
        System.out.println("Registered client: " + regClient);

        // verify the response received by the client: CREATED, and the location path's
        // last segment must match the server-side registration id
        assertEquals(ResponseCode.CREATED, response.getResponseCode());
        String location = ((CaliforniumClientIdentifier) response.getClientIdentifier()).getLocation().split("/")[2];
        assertEquals(regClient.getRegistrationId(), location);
    }

    @Test
    public void LightweightM2M_1_0_int_103_Deregistration() {
        // FIX(review): banner literal reconstructed for the same reason as above.
        System.out.println("--- LightweightM2M-1.0-int-103: Deregistration ---");

        // client registration
        OperationResponse regResponse = client.send(new RegisterRequest(clientEndpoint, new HashMap<String, String>()));
        Client regClient = server.getClientRegistry().get(clientEndpoint);
        assertNotNull(regClient);

        OperationResponse deregResponse = client.send(new DeregisterRequest(regResponse.getClientIdentifier()));
        System.out.println("Deregistration result: " + deregResponse);

        // verify the response received by the client
        assertEquals(ResponseCode.DELETED, deregResponse.getResponseCode());

        // verify the registration on server side: the client must be gone
        regClient = server.getClientRegistry().get(clientEndpoint);
        assertNull(regClient);
    }
}
package org.hisp.dhis.dataset;

import com.google.common.collect.Lists;
import org.hisp.dhis.dataapproval.DataApprovalService;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataentryform.DataEntryForm;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.query.QueryParserException;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Default {@link DataSetService} implementation. Delegates CRUD to
 * {@link DataSetStore} / {@link LockExceptionStore} and combines lock
 * exceptions with data-approval state to answer isLocked queries.
 *
 * @author Lars Helge Overland
 */
@Transactional
public class DefaultDataSetService
    implements DataSetService
{
    // -------------------------------------------------------------------------
    // Dependencies (setter-injected)
    // -------------------------------------------------------------------------

    private DataSetStore dataSetStore;

    public void setDataSetStore( DataSetStore dataSetStore )
    {
        this.dataSetStore = dataSetStore;
    }

    private LockExceptionStore lockExceptionStore;

    public void setLockExceptionStore( LockExceptionStore lockExceptionStore )
    {
        this.lockExceptionStore = lockExceptionStore;
    }

    private CurrentUserService currentUserService;

    public void setCurrentUserService( CurrentUserService currentUserService )
    {
        this.currentUserService = currentUserService;
    }

    private DataApprovalService dataApprovalService;

    public void setDataApprovalService( DataApprovalService dataApprovalService )
    {
        this.dataApprovalService = dataApprovalService;
    }

    // -------------------------------------------------------------------------
    // DataSet
    // -------------------------------------------------------------------------

    /** Persists the data set and returns its generated database id. */
    @Override
    public int addDataSet( DataSet dataSet )
    {
        dataSetStore.save( dataSet );

        return dataSet.getId();
    }

    @Override
    public void updateDataSet( DataSet dataSet )
    {
        dataSetStore.update( dataSet );
    }

    @Override
    public void deleteDataSet( DataSet dataSet )
    {
        dataSetStore.delete( dataSet );
    }

    @Override
    public DataSet getDataSet( int id )
    {
        return dataSetStore.get( id );
    }

    @Override
    public DataSet getDataSet( String uid )
    {
        return dataSetStore.getByUid( uid );
    }

    /** Looks up by UID bypassing access control (no ACL check). */
    @Override
    public DataSet getDataSetNoAcl( String uid )
    {
        return dataSetStore.getByUidNoAcl( uid );
    }

    @Override
    public List<DataSet> getDataSetsByDataEntryForm( DataEntryForm dataEntryForm )
    {
        return dataSetStore.getDataSetsByDataEntryForm( dataEntryForm );
    }

    @Override
    public List<DataSet> getAllDataSets()
    {
        return dataSetStore.getAll();
    }

    @Override
    public List<DataSet> getDataSetsByPeriodType( PeriodType periodType )
    {
        return dataSetStore.getDataSetsByPeriodType( periodType );
    }

    @Override
    public List<DataSet> getDataSetsByUid( Collection<String> uids )
    {
        return dataSetStore.getByUid( uids );
    }

    @Override
    public List<DataSet> getDataSetsForMobile( OrganisationUnit source )
    {
        return dataSetStore.getDataSetsForMobile( source );
    }

    /** Data sets for the currently logged-in user; see {@link #getUserDataSets(User)}. */
    @Override
    public List<DataSet> getUserDataSets()
    {
        return getUserDataSets( currentUserService.getCurrentUser() );
    }

    /** All data sets for null/superusers, otherwise the user's credential-linked sets. */
    @Override
    public List<DataSet> getUserDataSets( User user )
    {
        if ( user == null || user.isSuper() )
        {
            return getAllDataSets();
        }

        return Lists.newArrayList( user.getUserCredentials().getAllDataSets() );
    }

    // -------------------------------------------------------------------------
    // DataSet LockExceptions
    // -------------------------------------------------------------------------

    /** Persists the lock exception and returns its generated database id. */
    @Override
    public int addLockException( LockException lockException )
    {
        lockExceptionStore.save( lockException );

        return lockException.getId();
    }

    @Override
    public void updateLockException( LockException lockException )
    {
        lockExceptionStore.update( lockException );
    }

    @Override
    public void deleteLockException( LockException lockException )
    {
        lockExceptionStore.delete( lockException );
    }

    @Override
    public LockException getLockException( int id )
    {
        return lockExceptionStore.get( id );
    }

    @Override
    public int getLockExceptionCount()
    {
        return lockExceptionStore.getCount();
    }

    @Override
    public List<LockException> getAllLockExceptions()
    {
        return lockExceptionStore.getAll();
    }

    /** Name-ordered page of lock exceptions starting at {@code first}, at most {@code max}. */
    @Override
    public List<LockException> getLockExceptionsBetween( int first, int max )
    {
        return lockExceptionStore.getAllOrderedName( first, max );
    }

    @Override
    public List<LockException> getLockExceptionCombinations()
    {
        return lockExceptionStore.getCombinations();
    }

    @Override
    public void deleteLockExceptionCombination( DataSet dataSet, Period period )
    {
        lockExceptionStore.deleteCombination( dataSet, period );
    }

    @Override
    public void deleteLockExceptionCombination( DataSet dataSet, Period period, OrganisationUnit organisationUnit )
    {
        lockExceptionStore.deleteCombination( dataSet, period, organisationUnit );
    }

    /** Locked when the data set itself is locked and no lock exception covers the combination. */
    @Override
    public boolean isLocked( DataSet dataSet, Period period, OrganisationUnit organisationUnit, Date now )
    {
        return dataSet.isLocked( period, now ) && lockExceptionStore.getCount( dataSet, period, organisationUnit ) == 0L;
    }

    /** As above, additionally locked when the data set's workflow is approved. */
    @Override
    public boolean isLocked( DataSet dataSet, Period period, OrganisationUnit organisationUnit,
        DataElementCategoryOptionCombo attributeOptionCombo, Date now )
    {
        return isLocked( dataSet, period, organisationUnit, now )
            || dataApprovalService.isApproved( dataSet.getWorkflow(), period, organisationUnit, attributeOptionCombo );
    }

    /**
     * As above; with {@code useOrgUnitChildren} the unit counts as locked when
     * any of its children is locked (false when it has no children).
     */
    @Override
    public boolean isLocked( DataSet dataSet, Period period, OrganisationUnit organisationUnit,
        DataElementCategoryOptionCombo attributeOptionCombo, Date now, boolean useOrgUnitChildren )
    {
        if ( !useOrgUnitChildren )
        {
            return isLocked( dataSet, period, organisationUnit, attributeOptionCombo, now );
        }

        if ( organisationUnit == null || !organisationUnit.hasChild() )
        {
            return false;
        }

        for ( OrganisationUnit child : organisationUnit.getChildren() )
        {
            if ( isLocked( dataSet, period, child, attributeOptionCombo, now ) )
            {
                return true;
            }
        }

        return false;
    }

    /**
     * Data-element level lock check: expired without a covering lock exception,
     * or approved via the element's approval data set workflow (if any).
     */
    @Override
    public boolean isLocked( DataElement dataElement, Period period, OrganisationUnit organisationUnit,
        DataElementCategoryOptionCombo attributeOptionCombo, Date now )
    {
        now = now != null ? now : new Date();

        boolean expired = dataElement.isExpired( period, now );

        if ( expired && lockExceptionStore.getCount( dataElement, period, organisationUnit ) == 0L )
        {
            return true;
        }

        DataSet dataSet = dataElement.getApprovalDataSet();

        if ( dataSet == null )
        {
            return false;
        }

        return dataApprovalService.isApproved( dataSet.getWorkflow(), period, organisationUnit, attributeOptionCombo );
    }

    /**
     * Filters all lock exceptions by "property:operator:value" expressions
     * (organisationUnit.id, dataSet.id, period); multiple filters intersect.
     *
     * @throws QueryParserException when a filter is not exactly three segments.
     */
    @Override
    public List<LockException> filterLockExceptions( List<String> filters )
    {
        List<LockException> lockExceptions = getAllLockExceptions();
        Set<LockException> returnList = new HashSet<>( lockExceptions );

        for ( String filter : filters )
        {
            String[] split = filter.split( ":" );

            if ( split.length != 3 )
            {
                throw new QueryParserException( "Invalid filter: " + filter );
            }

            if ( "organisationUnit.id".equalsIgnoreCase( split[0] ) )
            {
                returnList.retainAll( getLockExceptionByOrganisationUnit( split[1], split[2], returnList ) );
            }

            if ( "dataSet.id".equalsIgnoreCase( split[0] ) )
            {
                returnList.retainAll( getLockExceptionByDataSet( split[1], split[2], returnList ) );
            }

            if ( "period".equalsIgnoreCase( split[0] ) )
            {
                returnList.retainAll( getLockExceptionByPeriod( split[1], split[2], returnList ) );
            }
        }

        return new ArrayList<>( returnList );
    }

    /** Keeps lock exceptions whose organisation unit UID matches the id expression. */
    private List<LockException> getLockExceptionByOrganisationUnit( String operator, String orgUnitIds,
        Collection<LockException> lockExceptions )
    {
        List<String> ids = parseIdFromString( orgUnitIds, operator );

        return lockExceptions.stream()
            .filter( lockException -> ids.contains( lockException.getOrganisationUnit().getUid() ) )
            .collect( Collectors.toList() );
    }

    /** Keeps lock exceptions whose data set UID matches the id expression. */
    private List<LockException> getLockExceptionByDataSet( String operator, String dataSetIds,
        Collection<LockException> lockExceptions )
    {
        List<String> ids = parseIdFromString( dataSetIds, operator );

        return lockExceptions.stream()
            .filter( lockException -> ids.contains( lockException.getDataSet().getUid() ) )
            .collect( Collectors.toList() );
    }

    /** Keeps lock exceptions whose period ISO date matches the id expression. */
    private List<LockException> getLockExceptionByPeriod( String operator, String periods,
        Collection<LockException> lockExceptions )
    {
        List<String> ids = parseIdFromString( periods, operator );

        return lockExceptions.stream()
            .filter( lockException -> ids.contains( lockException.getPeriod().getIsoDate() ) )
            .collect( Collectors.toList() );
    }

    /**
     * Parses an id expression: "in" expects a bracketed comma-separated list
     * ("[a,b]"), "eq" a single id. Any other operator yields an empty list.
     *
     * @throws QueryParserException when an "in" value is not bracketed.
     */
    private List<String> parseIdFromString( String input, String operator )
    {
        List<String> ids = new ArrayList<>();

        if ( "in".equalsIgnoreCase( operator ) )
        {
            if ( input.startsWith( "[" ) && input.endsWith( "]" ) )
            {
                String[] split = input.substring( 1, input.length() - 1 ).split( "," );
                Collections.addAll( ids, split );
            }
            else
            {
                throw new QueryParserException( "Invalid query: " + input );
            }
        }
        else if ( "eq".equalsIgnoreCase( operator ) )
        {
            ids.add( input );
        }

        return ids;
    }
}
package org.amc.game.chess; import static org.junit.Assert.*; import static org.amc.game.chess.ChessBoard.Coordinate.*; import org.junit.After; import org.junit.Before; import org.junit.Test; public class CastlingTest { private ChessBoard board; private KingPiece whiteKing; private RookPiece whiteLeftRook; private RookPiece whiteRightRook; private Location whiteKingStartPosition; private Location whiteLeftRookStartPosition; private Location whiteRightRookStartPosition; private ChessGame chessGame; private CastlingRule gameRule; private Player whitePlayer; private Player blackPlayer; @Before public void setUp() throws Exception { board=new ChessBoard(); whitePlayer=new HumanPlayer("White Player",Colour.WHITE); blackPlayer=new HumanPlayer("Black Player", Colour.BLACK); chessGame=new ChessGame(board,whitePlayer,blackPlayer); gameRule=new CastlingRule(); whiteKing=new KingPiece(Colour.WHITE); whiteLeftRook=new RookPiece(Colour.WHITE); whiteRightRook=new RookPiece(Colour.WHITE); whiteKingStartPosition=new Location(E,1); whiteLeftRookStartPosition=new Location(A,1); whiteRightRookStartPosition=new Location(H,1); board.putPieceOnBoardAt(whiteKing, whiteKingStartPosition); board.putPieceOnBoardAt(whiteRightRook, whiteRightRookStartPosition); board.putPieceOnBoardAt(whiteLeftRook, whiteLeftRookStartPosition); } @After public void tearDown() throws Exception { } @Test public void testLeftSideCastling(){ Location castlingKingLocation=new Location(C,1); assertTrue(whiteKing.isCastlingMove(new Move(whiteKingStartPosition,castlingKingLocation))); assertTrue(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testRightSideCastling(){ Location castlingKingLocation=new Location(G,1); assertTrue(whiteKing.isCastlingMove(new Move(whiteKingStartPosition,castlingKingLocation))); assertTrue(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testKingMovedCastlingNotAllowed(){ 
Location castlingKingLocation=new Location(G,1); whiteKing.moved(); assertFalse(whiteKing.isCastlingMove(new Move(whiteKingStartPosition,castlingKingLocation))); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testRightRookMovedCastlingNotAllowed(){ Location castlingKingLocation=new Location(G,1); whiteRightRook.moved(); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testLeftRookMovedCastlingNotAllowed(){ Location castlingKingLocation=new Location(C,1); whiteLeftRook.moved(); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testKingHasMoveOneSquare(){ Location castlingKingLocation=new Location(F,1); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testKingHasTwoSquareUpAndAcrossTheBoard(){ Location castlingKingLocation=new Location(G,3); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testNotLeftRook(){ Location castlingKingLocation=new Location(C,1); board.putPieceOnBoardAt(new BishopPiece(Colour.WHITE), whiteLeftRookStartPosition); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testNotRightRook(){ Location castlingKingLocation=new Location(G,1); board.putPieceOnBoardAt(new BishopPiece(Colour.WHITE), whiteRightRookStartPosition); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testSquareBetweenKingAndRightRookNotEmpty(){ Location castlingKingLocation=new Location(G,1); board.putPieceOnBoardAt(new BishopPiece(Colour.WHITE), new Location(F,1)); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); board.putPieceOnBoardAt(new 
BishopPiece(Colour.WHITE), new Location(G,1)); board.removePieceOnBoardAt(new Location(F,1)); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testSquareBetweenKingAndLeftRookNotEmpty(){ Location castlingKingLocation=new Location(C,1); board.putPieceOnBoardAt(new BishopPiece(Colour.WHITE), new Location(B,1)); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); board.putPieceOnBoardAt(new BishopPiece(Colour.WHITE), new Location(C,1)); board.removePieceOnBoardAt(new Location(B,1)); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); board.removePieceOnBoardAt(new Location(C,1)); board.putPieceOnBoardAt(new BishopPiece(Colour.WHITE), new Location(D,1)); assertFalse(gameRule.isCastlingMove(board,new Move(whiteKingStartPosition,castlingKingLocation))); } @Test public void testRightRookMovesToCastlePosition() throws InvalidMoveException{ Location castlingKingLocation=new Location(G,1); Move whiteKingCastleMove=new Move(whiteKingStartPosition, castlingKingLocation); chessGame.move(whitePlayer, whiteKingCastleMove); } @Test public void testLeftRookMovesToCastlePosition() throws InvalidMoveException{ Location castlingKingLocation=new Location(C,1); Move whiteKingCastleMove=new Move(whiteKingStartPosition, castlingKingLocation); chessGame.move(whitePlayer, whiteKingCastleMove); } }
package tabletop2; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.Map; import java.util.Random; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.swing.JOptionPane; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import org.w3c.dom.Document; import org.w3c.dom.DocumentFragment; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import tabletop2.util.MyRigidBodyControl; import tabletop2.util.MySliderJoint; import com.jme3.bounding.BoundingVolume; import com.jme3.bullet.BulletAppState; import com.jme3.bullet.collision.shapes.BoxCollisionShape; import com.jme3.bullet.joints.SixDofJoint; import com.jme3.input.InputManager; import com.jme3.input.KeyInput; import com.jme3.input.controls.ActionListener; import com.jme3.input.controls.KeyTrigger; import com.jme3.math.ColorRGBA; import com.jme3.math.FastMath; import com.jme3.math.Quaternion; import com.jme3.math.Transform; import com.jme3.math.Vector3f; import com.jme3.scene.Node; import com.jme3.scene.Spatial; public class 
Table implements ActionListener { private static final Logger logger = Logger.getLogger(Table.class.getName()); private static final String SCHEMA_DEEP_FNAME = "tablesetup/schema/deep.xsd"; private static final String SCHEMA_SHALLOW_FNAME = "tablesetup/schema/shallow.xsd"; private static Random random = new Random(5566); private String name; private boolean enabled = true; private Node rootNode; private Factory factory; private BulletAppState bulletAppState; private Inventory inventory; private Node robotLocationNode; private float tableWidth = 20; private float tableDepth = 12; private static final float TABLE_HEIGHT = 4; private Spatial tableSpatial = null; private int idSN = 0; private HashSet<String> uniqueIds = new HashSet<String>(); private HashSet<String> xmlFnameLoaded = new HashSet<>(); private HashMap<String, Element> defs = new HashMap<>(); private HashMap<String, HashMap<String, String>> defVars = new HashMap<>(); public Table(String name, MainApp app, Node robotLocationNode) { this.name = name; rootNode = app.getRootNode(); factory = app.getFactory(); bulletAppState = app.getBulletAppState(); inventory = app.getInventory(); this.robotLocationNode = robotLocationNode; } public float getWidth() { return tableWidth; } public float getDepth() { return tableDepth; } public ColorRGBA getColor() { return ColorRGBA.White; } public BoundingVolume getWorldBound() { return tableSpatial.getWorldBound(); } public void setEnabled(boolean v) { enabled = v; } public void reloadXml(String xmlFname) { // remove the table (if exists) if (tableSpatial != null) { MyRigidBodyControl rbc = tableSpatial.getControl(MyRigidBodyControl.class); if (rbc != null) { bulletAppState.getPhysicsSpace().remove(rbc); tableSpatial.removeControl(rbc); } rootNode.detachChild(tableSpatial); } // remove all free items (items not currently being grasped) inventory.removeAllFreeItems(); uniqueIds.clear(); for (Spatial s : inventory.allItems()) { uniqueIds.add(s.getName()); } idSN = 0; 
defs.clear(); xmlFnameLoaded.clear(); Document doc = parseXmlFile(xmlFname); if (doc != null) { processIncludeElements(doc); processDefElements(doc); processInstanceElements(doc, doc.getDocumentElement(), new HashMap<String, String>()); writeXmlToFile(doc, "tablesetup/debug.xml"); doc = validateXmlTree(doc, true); processXmlTree(doc); } // relocate the robot according to table size robotLocationNode.setLocalTransform(Transform.IDENTITY); robotLocationNode.setLocalTranslation(0, 2, tableDepth / 2 + 3); robotLocationNode.setLocalRotation(new Quaternion().fromAngleAxis(FastMath.HALF_PI, Vector3f.UNIT_Y)); } /** * Validate an existing DOM tree. * @param doc * @return a document node after validation. */ private Document validateXmlTree(Document doc, boolean deep) { // make schema SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); Schema schema = null; try { schema = sf.newSchema(new File(deep ? SCHEMA_DEEP_FNAME : SCHEMA_SHALLOW_FNAME)); } catch (SAXException e1) { String msg = "schema error: " + SCHEMA_DEEP_FNAME; logger.log(Level.WARNING, msg, e1); showMessageDialog(msg + ": " + e1.getMessage(), 400); return doc; } // validate Validator validator = schema.newValidator(); DOMResult res = new DOMResult(); try { validator.validate(new DOMSource(doc), res); } catch (SAXException | IOException e1) { String msg = "schema validation error"; logger.log(Level.WARNING, msg, e1); showMessageDialog(msg + ": " + e1.getMessage(), 400); return doc; } return (Document) res.getNode(); } /** * Parse XML file into a DOM tree without validation. 
	 * @param fname path of the XML file to parse
	 * @return a document node, or {@code null} if parsing fails
	 */
	private Document parseXmlFile(String fname) {
		// make document parser
		DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
		dbf.setNamespaceAware(true);
		// validation is done separately (validateXmlTree), not while parsing
		dbf.setSchema(null);
		// NOTE(review): DTDs/external entities are not disabled here; if setup
		// files can come from untrusted sources this is a potential XXE vector
		// — confirm the threat model.
		DocumentBuilder db = null;
		try {
			db = dbf.newDocumentBuilder();
		} catch (ParserConfigurationException e1) {
			String msg = "parse error: " + fname;
			logger.log(Level.WARNING, msg, e1);
			showMessageDialog(msg + ": " + e1.getMessage(), 400);
			return null;
		}
		// escalate recoverable parse errors into exceptions; ignore warnings
		db.setErrorHandler(new ErrorHandler() {
			@Override
			public void warning(SAXParseException exception) throws SAXException {
			}

			@Override
			public void error(SAXParseException exception) throws SAXException {
				throw exception;
			}

			@Override
			public void fatalError(SAXParseException exception) throws SAXException {
				throw exception;
			}
		});
		// parse document
		Document doc = null;
		try {
			doc = db.parse(new File(fname));
		} catch (SAXException e) {
			String msg = "cannot parse " + fname;
			logger.log(Level.WARNING, msg, e);
			showMessageDialog(msg + ": " + e.getMessage(), 400);
			return null;
		} catch (IOException e) {
			String msg = "cannot read from " + fname;
			logger.log(Level.WARNING, msg, e);
			showMessageDialog(msg + ": " + e.getMessage(), 400);
			return null;
		} catch (RuntimeException e) {
			String msg = "an error occurs in " + fname;
			logger.log(Level.WARNING, msg, e);
			showMessageDialog(msg + ": " + e.getMessage(), 400);
			return null;
		}
		// shallow-validate the fresh tree, and remember the file name so that
		// <include> does not load the same file twice
		doc = validateXmlTree(doc, false);
		xmlFnameLoaded.add(fname);
		return doc;
	}

	// Serializes the DOM tree to a file (used to dump the expanded setup for debugging).
	private void writeXmlToFile(Document doc, String fname) {
		Transformer transformer;
		try {
			transformer = TransformerFactory.newInstance().newTransformer();
		} catch (TransformerConfigurationException | TransformerFactoryConfigurationError e) {
			e.printStackTrace();
			return;
		}
		Result output = new StreamResult(new File(fname));
		Source input = new DOMSource(doc);
		try {
			transformer.transform(input, output);
		} catch (TransformerException e) {
			e.printStackTrace();
			return;
		}
	}

	// Debug helper: prints every node together with its path from the root.
	@SuppressWarnings("unused")
	private void walkXmlTree(org.w3c.dom.Node root, String path) {
		System.out.println(path + ": " + root);
		path += "->" + root;
		for (org.w3c.dom.Node child = root.getFirstChild(); child != null; child = child.getNextSibling()) {
			walkXmlTree(child, path);
		}
	}

	/**
	 * Expands all {@code <include>} elements in the document recursively.
	 * Each included file's top-level children are imported in place of the
	 * {@code <include>} element; files already loaded are skipped.
	 *
	 * @param doc the document whose includes are expanded in place
	 */
	private void processIncludeElements(Document doc) {
		Element root = doc.getDocumentElement();
		// next sibling is captured first because children are removed during iteration
		for (org.w3c.dom.Node child = root.getFirstChild(); child != null;) {
			org.w3c.dom.Node nextChild = child.getNextSibling();
			if (child.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE
					&& child.getNodeName().equals("include")) {
				String fname = ((Element) child).getAttribute("file");
				Document incDoc = null;
				if (!xmlFnameLoaded.contains(fname)) {
					incDoc = parseXmlFile(fname);
				} else {
					String msg = "xml file " + fname + " has already been loaded (skip)";
					logger.log(Level.WARNING, msg);
				}
				if (incDoc != null) {
					// expand includes inside the included document first
					processIncludeElements(incDoc);
					Element incRoot = incDoc.getDocumentElement();
					for (org.w3c.dom.Node incChild = incRoot.getFirstChild(); incChild != null;) {
						org.w3c.dom.Node nextIncChild = incChild.getNextSibling();
						// importNode clones into this document's ownership
						incChild = doc.importNode(incChild, true);
						if (incChild != null) {
							root.insertBefore(incChild, child);
						}
						incChild = nextIncChild;
					}
				}
				root.removeChild(child);
			}
			child = nextChild;
		}
	}

	/**
	 * Store and remove all {@code <def>} elements from {@code doc}.
	 * @param doc the document to scan for top-level {@code <def>} elements
	 */
	private void processDefElements(Document doc) {
		Element root = doc.getDocumentElement();
		for (org.w3c.dom.Node child = root.getFirstChild(); child != null;) {
			org.w3c.dom.Node nextChild = child.getNextSibling();
			if (child.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE
					&& child.getNodeName().equals("def")) {
				String name = ((Element) child).getAttribute("name");
				if (defs.containsKey(name)) {
					String msg = "Duplicated definition detected: " + name + " (overwrite)";
					logger.log(Level.WARNING, msg);
					showMessageDialog(msg, 400);
					defs.remove(name);
					defVars.remove(name);
				}
				// get default variable values in def
				HashMap<String, String> vars = new HashMap<>();
				for (org.w3c.dom.Node defChild = child.getFirstChild(); defChild != null;) {
					org.w3c.dom.Node nextDefChild = defChild.getNextSibling();
					if (defChild.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE
							&& defChild.getNodeName().equals("var")) {
						vars.put(((Element) defChild).getAttribute("name"),
								((Element) defChild).getAttribute("value"));
						// <var> children are consumed; only the body remains in the def
						child.removeChild(defChild);
					}
					defChild = nextDefChild;
				}
				defs.put(name, (Element) child);
				defVars.put(name, vars);
				root.removeChild(child);
			}
			child = nextChild;
		}
	}

	/**
	 * Process the subtree {@code root} for {@code <instance>}-related work:
	 * (1) expand all child {@code <instance>} nodes;
	 * (2) substitute the root's variable values.
	 * @param doc the owning document
	 * @param root the subtree to process
	 * @param vars variable values currently in scope
	 */
	private void processInstanceElements(Document doc, Element root, Map<String, String> vars) {
		// substitute variable values
		performVariableSubst(root, vars);
		if (root.getNodeName().equals("instance")) {
			performInstanceExpansion(doc, root, vars);
		} else if (root.getNodeName().equals("def")) {
			// nested defs are only legal at top level; drop them here
			String msg = "Element &lt;def&gt; is not allowed here (removed)";
			logger.log(Level.WARNING, msg);
			showMessageDialog(msg, 400);
			org.w3c.dom.Node parent = root.getParentNode();
			if (parent.getNodeType() != org.w3c.dom.Node.DOCUMENT_NODE) {
				parent.removeChild(root);
			}
		} else {
			for (org.w3c.dom.Node child = root.getFirstChild(); child != null;) {
				org.w3c.dom.Node nextChild = child.getNextSibling();
				if (child.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
					processInstanceElements(doc, (Element) child, vars);
				}
				child = nextChild;
			}
		}
	}

	// Replaces one <instance> element with a processed copy of its def's body.
	private void performInstanceExpansion(Document doc, Element elm, Map<String, String> vars) {
		// look up def
		String defName = elm.getAttribute("def");
		DocumentFragment frag = doc.createDocumentFragment();
		if (!defs.containsKey(defName)) {
			String msg = "Definition not found: " + defName + " (ignored)";
			logger.log(Level.WARNING, msg);
			showMessageDialog(msg, 400);
		} else {
			// make a copy of def
			Element def = defs.get(defName);
			for (org.w3c.dom.Node defChild = def.getFirstChild(); defChild != null;
					defChild = defChild.getNextSibling()) {
				frag.appendChild(defChild.cloneNode(true));
			}
			// substitute for variable definitions under <instance>
			// (the def's defaults are loaded first, then overridden below)
			vars.putAll(defVars.get(defName));
			for (org.w3c.dom.Node child = elm.getFirstChild(); child != null;
					child = child.getNextSibling()) {
				if (child.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE
						&& child.getNodeName().equals("var")) {
					performVariableSubst((Element) child, vars);
				}
			}
			// get variable values
			for (org.w3c.dom.Node child = elm.getFirstChild(); child != null;
					child = child.getNextSibling()) {
				if (child.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE
						&& child.getNodeName().equals("var")) {
					vars.put(((Element) child).getAttribute("name"),
							((Element) child).getAttribute("value"));
				}
			}
			// recursively process those new cloned nodes in frag
			// (each subtree gets its own copy of the variable scope)
			for (org.w3c.dom.Node dfChild = frag.getFirstChild(); dfChild != null;
					dfChild = dfChild.getNextSibling()) {
				if (dfChild.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
					processInstanceElements(doc, (Element) dfChild, new HashMap<>(vars));
				}
			}
		}
		// splice the expansion in place of the <instance> element
		elm.getParentNode().insertBefore(frag, elm);
		elm.getParentNode().removeChild(elm);
	}

	// Substitutes __name__ (optionally __name <op> <number>__) patterns in every
	// attribute of elm with values from vars; unknown names are left as-is.
	private void performVariableSubst(Element elm, Map<String, String> vars) {
		NamedNodeMap attrs = elm.getAttributes();
		for (int i = 0; i < attrs.getLength(); ++i) {
			org.w3c.dom.Node attr = attrs.item(i);
			// a <var>'s own "name" attribute names the variable; never substitute it
			if (elm.getNodeName().equals("var") && attr.getNodeName().equals("name")) {
				continue;
			}
			String attrVal = attr.getNodeValue();
			// group 1 = variable name, group 2 = optional arithmetic postfix
			Pattern pat = Pattern.compile("__([_a-zA-Z0-9]+)(.*?)__");
			Matcher mat = pat.matcher(attrVal);
			StringBuffer buf = new StringBuffer();
			while (mat.find()) {
				String varName = "__" + mat.group(1) + "__";
				if (vars.containsKey(varName)) {
					String varValue = vars.get(varName);
					varValue = performVariableArithmetic(varValue, mat.group(2));
					// NOTE(review): appendReplacement treats '$' and '\' in the
					// replacement specially; values containing them may break —
					// consider Matcher.quoteReplacement. Confirm whether such
					// values can occur.
					mat.appendReplacement(buf, varValue);
				} else {
					mat.appendReplacement(buf, mat.group());
				}
			}
			mat.appendTail(buf);
			attr.setNodeValue(buf.toString());
		}
	}

	// Applies a "<op> <number>" postfix (e.g. "+ 2", "* 0.5") to a variable
	// value; non-numeric values fall back to string concatenation for "+".
	private String performVariableArithmetic(String val, String postfix) {
		if (postfix == null || postfix.length() == 0) {
			return val;
		}
		Pattern pat = Pattern.compile("^\\s*([\\+\\-\\*/])\\s*(\\d+(?:\\.\\d+)?)\\s*$");
		Matcher mat = pat.matcher(postfix);
		if (mat.find()) {
			String operator = mat.group(1);
			String operand = mat.group(2);
			try {
				float v1 = Float.parseFloat(val);
				float v2 = Float.parseFloat(operand);
				if (operator.equals("+")) {
					v1 += v2;
				} else if (operator.equals("-")) {
					v1 -= v2;
				} else if (operator.equals("*")) {
					v1 *= v2;
				} else if (operator.equals("/")) {
					v1 /= v2;
				}
				val = "" + v1;
			} catch (NumberFormatException e) {
				if (operator.equals("+")) {
					// string concatenation
					val += operand;
				} else {
					String msg = "cannot perform variable arithmetic: " + val + operator + operand;
					logger.log(Level.WARNING, msg);
					showMessageDialog(msg, 400);
				}
			}
		}
		return val;
	}

	// Builds the table and every item from the fully expanded, validated tree.
	private void processXmlTree(Document doc) {
		Element root = doc.getDocumentElement();
		tableWidth = Float.parseFloat(root.getAttribute("xspan"));
		tableDepth = Float.parseFloat(root.getAttribute("yspan"));
		makeTable();
		for (org.w3c.dom.Node child = root.getFirstChild(); child != null;
				child = child.getNextSibling()) {
			if (child.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE) {
				continue;
			}
			// dispatch each top-level element to its builder
			Element elm = (Element) child;
			if (elm.getNodeName().equals("block")) {
				processBlockElement(elm, true);
			} else if (elm.getNodeName().equals("cylinder")) {
				processCylinderElement(elm, true);
			} else if (elm.getNodeName().equals("sphere")) {
				processSphereElement(elm, true);
			} else if (elm.getNodeName().equals("box")) {
				processBoxElement(elm, true);
			} else if (elm.getNodeName().equals("custom")) {
				processCustomElement(elm, true);
			} else if (elm.getNodeName().equals("lid")) {
				processLidElement(elm, true);
			} else if (elm.getNodeName().equals("cartridge")) {
				processCartridgeElement(elm, true);
			} else if (elm.getNodeName().equals("composite")) {
				processCompositeElement(elm, true, null);
			} else if (elm.getNodeName().equals("chain")) {
				processChainElement(elm);
			} else if (elm.getNodeName().equals("lidbox")) {
				processLidBoxElement(elm);
			} else if (elm.getNodeName().equals("dock")) {
				processDockElement(elm);
			} else if (elm.getNodeName().equals("sliderJoint")) {
				processSliderJointElement(elm);
			} else {
				logger.log(Level.WARNING, "skipping unknown element " + elm.getNodeName());
			}
		}
		inventory.resolveStateControlDownstreamIds();
	}

	// Pops up a fixed-width HTML message dialog.
	private void showMessageDialog(String msg, int dialogWidth) {
		JOptionPane.showMessageDialog(null,
				"<html><body width='" + dialogWidth + "'>" + msg + "</body></html>");
	}

	// private List<Element> getElementList(NodeList nodeList) {
	// List<Element> ret = new
	// LinkedList<>();
	// for (int i = 0; i < nodeList.getLength(); ++i) {
	// if (nodeList.item(i).getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
	// ret.add((Element) nodeList.item(i));
	// return ret;

	/** Builds the tabletop spatial with a static (mass 0) physics body. */
	private void makeTable() {
		// make table
		tableSpatial = factory.makeBigBlock(name, tableWidth, TABLE_HEIGHT, tableDepth,
				ColorRGBA.White, 4);
		tableSpatial.setLocalTranslation(0, -TABLE_HEIGHT / 2, 0);
		MyRigidBodyControl rbc = new MyRigidBodyControl(0);
		tableSpatial.addControl(rbc);
		bulletAppState.getPhysicsSpace().add(rbc);
		rootNode.attachChild(tableSpatial);
	}

	/**
	 * Builds a slider joint between the first two child objects of the given
	 * element, sliding along the joint's local x axis within [min, max].
	 */
	private void processSliderJointElement(Element e) {
		// ignore id
		Vector3f location = parseVector3(e.getAttribute("location"));
		Vector3f rotation = parseVector3(e.getAttribute("rotation"));
		float min = Float.parseFloat(e.getAttribute("min"));
		float max = Float.parseFloat(e.getAttribute("max"));
		if (min > max) {
			logger.log(Level.SEVERE, "slider joint has min > max");
			return;
		}
		float init = Float.parseFloat(e.getAttribute("init"));
		init = FastMath.clamp(init, min, max);
		boolean collision = Boolean.parseBoolean(e.getAttribute("collision"));
		// collect the (at most two) jointed objects from the child elements
		Spatial[] objs = new Spatial[2];
		int k = 0;
		for (org.w3c.dom.Node child = e.getFirstChild(); child != null; child = child.getNextSibling()) {
			if (child.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE) {
				continue;
			}
			Element childElm = (Element) child;
			Spatial obj = null;
			if (childElm.getNodeName().equals("block")) {
				obj = processBlockElement(childElm, true);
			} else if (childElm.getNodeName().equals("cylinder")) {
				obj = processCylinderElement(childElm, true);
			} else if (childElm.getNodeName().equals("sphere")) {
				obj = processSphereElement(childElm, true);
			} else if (childElm.getNodeName().equals("box")) {
				obj = processBoxElement(childElm, true);
			} else if (childElm.getNodeName().equals("custom")) {
				obj = processCustomElement(childElm, true);
			} else if (childElm.getNodeName().equals("lid")) {
				obj = processLidElement(childElm, true);
			} else if (childElm.getNodeName().equals("cartridge")) {
				obj = processCartridgeElement(childElm, true);
			} else if (childElm.getNodeName().equals("composite")) {
				obj = processCompositeElement(childElm, true, null);
			}
			if (obj != null) {
				if (k >= 2) {
					String msg = "sliderJoint " + e.getAttribute("id")
							+ " contains more than two objects: " + obj.getName() + " (ignored)";
					logger.log(Level.WARNING, msg);
					showMessageDialog(msg, 400);
				} else {
					objs[k] = obj;
					++k;
				}
			}
		}
		// make joint between the two objects
		if (objs[0] != null && objs[1] != null) {
			Transform jointTrans = new Transform(location, new Quaternion().fromAngles(
					rotation.x * FastMath.DEG_TO_RAD,
					rotation.y * FastMath.DEG_TO_RAD,
					rotation.z * FastMath.DEG_TO_RAD));
			MyRigidBodyControl c;
			Transform trans;
			// transform obj1 using ((the joint's transform) * (obj1's local transform))
			Transform obj1Trans = objs[0].getLocalTransform();
			trans = obj1Trans.clone();
			trans.combineWithParent(jointTrans);
			c = objs[0].getControl(MyRigidBodyControl.class);
			c.setPhysicsLocation(trans.getTranslation());
			c.setPhysicsRotation(trans.getRotation());
			// transform obj2 using ((the joint's transform) * (slide to init pos) * (obj1's local transform))
			Transform obj2Trans = objs[1].getLocalTransform();
			trans = obj2Trans.clone();
			// slide to initial position
			trans.combineWithParent(new Transform(Vector3f.UNIT_X.mult(-init)));
			trans.combineWithParent(jointTrans);
			c = objs[1].getControl(MyRigidBodyControl.class);
			c.setPhysicsLocation(trans.getTranslation());
			c.setPhysicsRotation(trans.getRotation());
			// note the negate/transpose: because the objects' local transforms are relative to the pivots,
			// but these parameters take the pivots' transforms relative to the objects.
			inventory.addSliderJoint(objs[0], objs[1],
					obj1Trans.getTranslation().negate(),
					obj2Trans.getTranslation().negate(),
					obj1Trans.getRotation().toRotationMatrix().transpose(),
					obj2Trans.getRotation().toRotationMatrix().transpose(),
					min, max, collision);
		}
	}

	/**
	 * Builds a solid block item. When {@code isWhole} is true the block is a
	 * free-standing item (uniquified id, mass, inventory registration and
	 * obj_* annotations); otherwise it is a sub-part of a composite.
	 */
	private Spatial processBlockElement(Element elm, boolean isWhole) {
		String id = elm.getAttribute("id");
		if (isWhole) {
			id = getUniqueId(id);
		}
		Vector3f location = parseVector3(elm.getAttribute("location"));
		Vector3f rotation = parseVector3(elm.getAttribute("rotation"));
		ColorRGBA color = parseColor(elm.getAttribute("color"));
		float xspan = Float.parseFloat(elm.getAttribute("xspan"));
		// The XML "zspan"/"yspan" attributes are read into the opposite local
		// variables; this swap appears intentional (the obj_* user data below
		// applies the same swap in reverse, and sibling parsers do the same) —
		// presumably the setup file's y axis maps to the scene's z axis. Confirm.
		float yspan = Float.parseFloat(elm.getAttribute("zspan"));
		float zspan = Float.parseFloat(elm.getAttribute("yspan"));
		Spatial s = factory.makeBlock(id, xspan, yspan, zspan, color);
		s.setLocalTranslation(location);
		s.setLocalRotation(new Quaternion().fromAngles(
				rotation.x * FastMath.DEG_TO_RAD,
				rotation.y * FastMath.DEG_TO_RAD,
				rotation.z * FastMath.DEG_TO_RAD));
		if (isWhole) {
			float mass = Float.parseFloat(elm.getAttribute("mass"));
			inventory.addItem(s, mass);
			s.setUserData("obj_shape", "block");
			s.setUserData("obj_xspan", xspan);
			s.setUserData("obj_zspan", yspan);
			s.setUserData("obj_yspan", zspan);
			s.setUserData("obj_color", color);
		}
		return s;
	}

	/** Builds a cylinder item; see processBlockElement for the isWhole contract. */
	private Spatial processCylinderElement(Element elm, boolean isWhole) {
		String id = elm.getAttribute("id");
		if (isWhole) {
			id = getUniqueId(id);
		}
		Vector3f location = parseVector3(elm.getAttribute("location"));
		Vector3f rotation = parseVector3(elm.getAttribute("rotation"));
		ColorRGBA color = parseColor(elm.getAttribute("color"));
		float radius = Float.parseFloat(elm.getAttribute("radius"));
		// XML "yspan" becomes the local z-extent (same axis swap as in processBlockElement)
		float zspan = Float.parseFloat(elm.getAttribute("yspan"));
		Spatial s = factory.makeCylinder(id, radius, zspan, color);
		s.setLocalTranslation(location);
		s.setLocalRotation(new Quaternion().fromAngles(
				rotation.x * FastMath.DEG_TO_RAD,
				rotation.y * FastMath.DEG_TO_RAD,
				rotation.z * FastMath.DEG_TO_RAD));
		if
(isWhole) { float mass = Float.parseFloat(elm.getAttribute("mass")); inventory.addItem(s, mass); s.setUserData("obj_shape", "cylinder"); s.setUserData("obj_radius", radius); s.setUserData("obj_yspan", zspan); s.setUserData("obj_color", color); } return s; } private Spatial processSphereElement(Element elm, boolean isWhole) { String id = elm.getAttribute("id"); if (isWhole) { id = getUniqueId(id); } Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = parseVector3(elm.getAttribute("rotation")); ColorRGBA color = parseColor(elm.getAttribute("color")); float radius = Float.parseFloat(elm.getAttribute("radius")); Spatial s = factory.makeSphere(id, radius, color); s.setLocalTranslation(location); s.setLocalRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); if (isWhole) { float mass = Float.parseFloat(elm.getAttribute("mass")); inventory.addItem(s, mass); s.setUserData("obj_shape", "cylinder"); s.setUserData("obj_radius", radius); s.setUserData("obj_color", color); } return s; } private Spatial processBoxElement(Element elm, boolean isWhole) { String id = elm.getAttribute("id"); if (isWhole) { id = getUniqueId(id); } Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = parseVector3(elm.getAttribute("rotation")); ColorRGBA color = parseColor(elm.getAttribute("color")); float xspan = Float.parseFloat(elm.getAttribute("xspan")); float yspan = Float.parseFloat(elm.getAttribute("zspan")); float zspan = Float.parseFloat(elm.getAttribute("yspan")); float thickness = Float.parseFloat(elm.getAttribute("thickness")); Spatial s = factory.makeBoxContainer(id, xspan, yspan, zspan, thickness, color); s.setLocalTranslation(location); s.setLocalRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); if (isWhole) { float mass = 
Float.parseFloat(elm.getAttribute("mass"));
			inventory.addItem(s, mass);
			s.setUserData("obj_shape", "box");
			s.setUserData("obj_xspan", xspan);
			s.setUserData("obj_zspan", yspan);
			s.setUserData("obj_yspan", zspan);
			s.setUserData("obj_color", color);
			s.setUserData("obj_thickness", thickness);
		}
		return s;
	}

	// Loads a custom mesh item from a model file, scaled and tinted.
	// NOTE(review): unlike the sibling parsers, the id is uniquified even when
	// isWhole is false — confirm whether that is intended for composite children.
	private Spatial processCustomElement(Element elm, boolean isWhole) {
		String id = getUniqueId(elm.getAttribute("id"));
		Vector3f location = parseVector3(elm.getAttribute("location"));
		Vector3f rotation = parseVector3(elm.getAttribute("rotation"));
		ColorRGBA color = parseColor(elm.getAttribute("color"));
		float scale = Float.parseFloat(elm.getAttribute("scale"));
		String file = elm.getAttribute("file");
		Spatial s = factory.makeCustom(id, file, color, scale);
		s.setLocalTranslation(location);
		s.setLocalRotation(new Quaternion().fromAngles(
				rotation.x * FastMath.DEG_TO_RAD,
				rotation.y * FastMath.DEG_TO_RAD,
				rotation.z * FastMath.DEG_TO_RAD));
		if (isWhole) {
			float mass = Float.parseFloat(elm.getAttribute("mass"));
			inventory.addItem(s, mass);
			s.setUserData("obj_shape", "custom");
			s.setUserData("obj_color", color);
			s.setUserData("obj_scale", scale);
		}
		return s;
	}

	// Builds a lid: a flat body plus a box-container handle attached on top.
	// The handle*/obj_handle* attributes use the same XML y/z swap as the
	// other parsers.
	private Spatial processLidElement(Element elm, boolean isWhole) {
		String id = elm.getAttribute("id");
		if (isWhole) {
			id = getUniqueId(id);
		}
		Vector3f location = parseVector3(elm.getAttribute("location"));
		Vector3f rotation = parseVector3(elm.getAttribute("rotation"));
		ColorRGBA color = parseColor(elm.getAttribute("color"));
		float xspan = Float.parseFloat(elm.getAttribute("xspan"));
		float zspan = Float.parseFloat(elm.getAttribute("yspan"));
		float thickness = Float.parseFloat(elm.getAttribute("thickness"));
		float handleXspan = Float.parseFloat(elm.getAttribute("handleXspan"));
		float handleYspan = Float.parseFloat(elm.getAttribute("handleZspan"));
		float handleZspan = Float.parseFloat(elm.getAttribute("handleYspan"));
		float handleThickness = Float.parseFloat(elm.getAttribute("handleThickness"));
		ColorRGBA handleColor = parseColor(elm.getAttribute("handleColor"));
		Node s = new Node(id);
		s.setLocalTranslation(location);
		s.setLocalRotation(new Quaternion().fromAngles(
				rotation.x * FastMath.DEG_TO_RAD,
				rotation.y * FastMath.DEG_TO_RAD,
				rotation.z * FastMath.DEG_TO_RAD));
		Spatial lidBody = factory.makeBlock(id + "-lidbody", xspan, thickness, zspan, color);
		s.attachChild(lidBody);
		Spatial lidHandle = factory.makeBoxContainer(id + "-lidhandle", handleXspan, handleYspan,
				handleZspan, handleThickness, handleColor);
		// handle sits centered on top of the lid body
		lidHandle.setLocalTranslation(0, thickness / 2 + handleYspan / 2, 0);
		s.attachChild(lidHandle);
		if (isWhole) {
			float lidMass = Float.parseFloat(elm.getAttribute("mass"));
			inventory.addItem(s, lidMass);
			s.setUserData("obj_shape", "lid");
			s.setUserData("obj_color", color);
			s.setUserData("obj_xspan", xspan);
			s.setUserData("obj_yspan", zspan);
			s.setUserData("obj_thickness", thickness);
			s.setUserData("obj_handleXspan", handleXspan);
			s.setUserData("obj_handleZspan", handleYspan);
			s.setUserData("obj_handleYspan", handleZspan);
			s.setUserData("obj_handleThickness", handleThickness);
			s.setUserData("obj_handleColor", handleColor);
		}
		return s;
	}

	// Builds a multi-part "cartridge" (body pieces, feet, top plate, handle and
	// a bottom assembly point) grouped under one node and added as one item.
	private Spatial processCartridgeElement(Element elm, boolean isWhole) {
		String groupId = getUniqueId(elm.getAttribute("id"));
		Vector3f location = parseVector3(elm.getAttribute("location"));
		Vector3f rotation = parseVector3(elm.getAttribute("rotation"));
		float xspan = Float.parseFloat(elm.getAttribute("xspan"));
		// same XML y/z axis swap as in the other parsers
		float yspan = Float.parseFloat(elm.getAttribute("zspan"));
		float zspan = Float.parseFloat(elm.getAttribute("yspan"));
		ColorRGBA bodyColor = parseColor(elm.getAttribute("color"));
		ColorRGBA handleColor = parseColor(elm.getAttribute("handleColor"));
		ColorRGBA topColor = parseColor(elm.getAttribute("topColor"));
		float mass = Float.parseFloat(elm.getAttribute("mass"));
		Node node = new Node(groupId);
		node.setLocalTranslation(location);
		node.setLocalRotation(new Quaternion().fromAngles(
				rotation.x * FastMath.DEG_TO_RAD,
				rotation.y *
FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD));
        String id;
        // body - central piece
        id = getUniqueId(groupId + "-bodyC");
        float wBodyC = xspan * 3 / 7;
        float hBodyC = yspan;
        float dBodyC = zspan * 3 / 4;
        Spatial bodyC = factory.makeBlock(id, wBodyC, hBodyC, dBodyC, bodyColor);
        node.attachChild(bodyC);
        // body - left piece
        id = getUniqueId(groupId + "-bodyL");
        float wBodyL = xspan * 2 / 7;
        float hBodyL = yspan;
        float dBodyL = zspan; // 0.8163f
        Spatial bodyL = factory.makeBlock(id, wBodyL, hBodyL, dBodyL, bodyColor);
        bodyL.setLocalTranslation(-(wBodyC / 2 + wBodyL / 2), 0, 0);
        node.attachChild(bodyL);
        // body - right piece
        id = getUniqueId(groupId + "-bodyR");
        Spatial bodyR = factory.makeBlock(id, wBodyL, hBodyL, dBodyL, bodyColor);
        bodyR.setLocalTranslation(wBodyC / 2 + wBodyL / 2, 0, 0);
        node.attachChild(bodyR);
        // body - left foot
        id = getUniqueId(groupId + "-bodyLF");
        float wBodyLF = xspan * 0.3116f;
        float hBodyLF = yspan * 0.1781f;
        float dBodyLF = zspan;
        Spatial bodyLF = factory.makeBlock(id, wBodyLF, hBodyLF, dBodyLF, bodyColor);
        bodyLF.setLocalTranslation(-(wBodyC / 2 + wBodyLF / 2), -(hBodyL / 2 + hBodyLF / 2), 0);
        node.attachChild(bodyLF);
        // body - right foot
        id = getUniqueId(groupId + "-bodyRF");
        Spatial bodyRF = factory.makeBlock(id, wBodyLF, hBodyLF, dBodyLF, bodyColor);
        bodyRF.setLocalTranslation(wBodyC / 2 + wBodyLF / 2, -(hBodyL / 2 + hBodyLF / 2), 0);
        node.attachChild(bodyRF);
        // top
        id = getUniqueId(groupId + "-top");
        float wTop = xspan * 1.1225f;
        float hTop = yspan * 0.1818f;
        float dTop = zspan * 1.225f;
        Spatial top = factory.makeBlock(id, wTop, hTop, dTop, topColor);
        top.setLocalTranslation(0, hBodyC / 2 + hTop / 2, 0);
        node.attachChild(top);
        // handle
        // FIX: was getUniqueId(groupId + "-top") — copy-paste from the top piece above,
        // which made the handle's id collide with (and disambiguate against) the top's id.
        id = getUniqueId(groupId + "-handle");
        float wHandle = xspan * 0.7375f;
        float hHandle = yspan * 0.4545f;
        float dHandle = zspan * 0.695f;
        Spatial handle = factory.makeBlock(id, wHandle, hHandle, dHandle, handleColor);
        handle.setLocalTranslation(0, (hBodyC + hTop) / 2 + hHandle / 2, 0);
node.attachChild(handle); // bottom attach point id = getUniqueId(groupId + "-assemblyPoint"); Node att = new Node(id); att.setLocalTranslation(0, -hBodyL / 2 - hBodyLF, 0); att.setLocalRotation(new Quaternion().fromAngles(FastMath.HALF_PI, 0, 0)); att.setUserData("assembly", "cartridgeSlot"); att.setUserData("assemblyEnd", 1); node.attachChild(att); inventory.addItem(node, mass); // annotate... node.setUserData("obj_shape", "cartridge"); node.setUserData("obj_width", xspan); node.setUserData("obj_height", yspan); node.setUserData("obj_depth", zspan); node.setUserData("obj_color", bodyColor); node.setUserData("obj_handleColor", handleColor); node.setUserData("obj_topColor", topColor); return node; } private Spatial processCompositeElement(Element elm, boolean isWhole, Map<Spatial, StateControl> stateControlMap) { String id = elm.getAttribute("id"); if (isWhole) { id = getUniqueId(id); stateControlMap = new HashMap<>(); } Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = parseVector3(elm.getAttribute("rotation")); Node node = new Node(id); node.setLocalTranslation(location); node.setLocalRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); for (org.w3c.dom.Node child = elm.getFirstChild(); child != null; child = child.getNextSibling()) { if (child.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE) { continue; } Element childElm = (Element) child; if (childElm.getNodeName().equals("block")) { node.attachChild(processBlockElement(childElm, false)); } else if (childElm.getNodeName().equals("cylinder")) { node.attachChild(processCylinderElement(childElm, false)); } else if (childElm.getNodeName().equals("sphere")) { node.attachChild(processSphereElement(childElm, false)); } else if (childElm.getNodeName().equals("box")) { node.attachChild(processBoxElement(childElm, false)); } else if (childElm.getNodeName().equals("custom")) { 
node.attachChild(processCustomElement(childElm, false)); } else if (childElm.getNodeName().equals("composite")) { node.attachChild(processCompositeElement(childElm, false, stateControlMap)); } else if (childElm.getNodeName().equals("toggleSwitch")) { node.attachChild(processToggleSwitchElement(childElm, stateControlMap)); } else if (childElm.getNodeName().equals("indicatorSet")) { node.attachChild(processIndicatorSetElement(childElm, stateControlMap)); } else { logger.log(Level.WARNING, "skipping unknown composite element " + childElm.getNodeName()); } } if (isWhole) { float mass = Float.parseFloat(elm.getAttribute("mass")); inventory.addItem(node, mass); // register state controls for (Map.Entry<Spatial, StateControl> e : stateControlMap.entrySet()) { inventory.registerStateControl(e.getKey(), e.getValue()); } node.setUserData("obj_shape", "composite"); } return node; } private Spatial processToggleSwitchElement(Element elm, Map<Spatial, StateControl> stateControlMap) { String id = getUniqueId(elm.getAttribute("id")); Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = parseVector3(elm.getAttribute("rotation")); float xspan = Float.parseFloat(elm.getAttribute("xspan")); float zspan = Float.parseFloat(elm.getAttribute("yspan")); float yspan = Float.parseFloat(elm.getAttribute("zspan")); float angle = Float.parseFloat(elm.getAttribute("angle")) * FastMath.DEG_TO_RAD; ColorRGBA color = parseColor(elm.getAttribute("color")); boolean leftPressed = Boolean.parseBoolean(elm.getAttribute("leftPressed")); int numStates = Integer.parseInt(elm.getAttribute("numStates")); int initState = Integer.parseInt(elm.getAttribute("initState")); float btxspan = xspan / (1.0f + FastMath.cos(angle)); Node s = new Node(id); s.setLocalTranslation(location); s.setLocalRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); // button 1 Spatial b1 = factory.makeBlock(id + 
"-b1", btxspan, yspan, zspan, color); b1.setLocalTranslation(-btxspan / 2, yspan / 2, 0); s.attachChild(b1); // button 2 Spatial b2 = factory.makeBlock(id + "-b2", btxspan, yspan, zspan, color); b2.setLocalRotation(new Quaternion().fromAngles(0, 0, angle)); float hypoLen = FastMath.sqr(btxspan * btxspan + yspan * yspan); // hypotenues float cosine = (btxspan / hypoLen) * FastMath.cos(angle) - (yspan / hypoLen) * FastMath.sin(angle); float sine = (yspan / hypoLen) * FastMath.cos(angle) + (btxspan / hypoLen) * FastMath.sin(angle); b2.setLocalTranslation(hypoLen / 2f * cosine, hypoLen / 2f * sine, 0); s.attachChild(b2); // get <downstream> and <state> elements LinkedList<String> dsIds = new LinkedList<>(); for (org.w3c.dom.Node child = elm.getFirstChild(); child != null; child = child.getNextSibling()) { if (child.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE && child.getNodeName().equals("downstream")) { dsIds.add(((Element) child).getAttribute("id")); } } ToggleSwitchControl c = new ToggleSwitchControl(inventory, s, angle, leftPressed, numStates, initState); for (String dsId : dsIds) { c.addDownstreamId(dsId); } stateControlMap.put(s, c); return s; } private Spatial processIndicatorSetElement(Element elm, Map<Spatial, StateControl> stateControlMap) { String id = getUniqueId(elm.getAttribute("id")); Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = parseVector3(elm.getAttribute("rotation")); float xspan = Float.parseFloat(elm.getAttribute("xspan")); float lightZspan = Float.parseFloat(elm.getAttribute("lightZspan")); float lightRadius = Float.parseFloat(elm.getAttribute("lightRadius")); int numLights = Integer.parseInt(elm.getAttribute("numLights")); int initState = Integer.parseInt(elm.getAttribute("initState")); Node s = new Node(id); s.setLocalTranslation(location); s.setLocalRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); float 
lightIntv = 0; Vector3f lightPos = new Vector3f(); if (numLights > 1) { lightIntv = (xspan - lightRadius * 2) / (numLights - 1); lightPos.x = -xspan / 2 + lightRadius; lightPos.y = lightZspan / 2; } for (int i = 0; i < numLights; ++i) { Spatial light = factory.makeCylinder(id + "-light" + i, lightRadius, lightZspan, ColorRGBA.Black); light.setLocalRotation(new Quaternion().fromAngleAxis(FastMath.HALF_PI, Vector3f.UNIT_X)); light.setLocalTranslation(lightPos); lightPos.x += lightIntv; s.attachChild(light); } // get <downstream> and <state> elements LinkedList<String> dsIds = new LinkedList<>(); LinkedList<ColorRGBA[]> lightStates = new LinkedList<>(); for (org.w3c.dom.Node child = elm.getFirstChild(); child != null; child = child.getNextSibling()) { if (child.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE) { continue; } if (child.getNodeName().equals("downstream")) { dsIds.add(((Element) child).getAttribute("id")); } else if (child.getNodeName().equals("state")) { ColorRGBA[] ls = new ColorRGBA[numLights]; // get <light> elements under <state> for (org.w3c.dom.Node grandChild = child.getFirstChild(); grandChild != null; grandChild = grandChild.getNextSibling()) { if (grandChild.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE && grandChild.getNodeName().equals("light")) { int ind = Integer.parseInt(((Element) grandChild).getAttribute("id")); if (ind < 0 || ind >= numLights) { throw new IllegalArgumentException("invalid light id " + ind); } ColorRGBA color = parseColor(((Element) grandChild).getAttribute("color")); ls[ind] = color; } } lightStates.add(ls); } } IndicatorSetControl c = new IndicatorSetControl(inventory, s, initState, lightStates); for (String dsId : dsIds) { c.addDownstreamId(dsId); } stateControlMap.put(s, c); return s; // // button 1 // Spatial b1 = factory.makeBlock(id + "-b1", btxspan, yspan, zspan, color); // b1.setLocalTranslation(-btxspan / 2, yspan / 2, 0); // s.attachChild(b1); // // button 2 // Spatial b2 = factory.makeBlock(id + "-b2", 
btxspan, yspan, zspan, color); // b2.setLocalRotation(new Quaternion().fromAngles(0, 0, angle)); // float hypoLen = FastMath.sqr(btxspan * btxspan + yspan * yspan); // hypotenues // float cosine = (btxspan / hypoLen) * FastMath.cos(angle) - (yspan / hypoLen) * FastMath.sin(angle); // float sine = (yspan / hypoLen) * FastMath.cos(angle) + (btxspan / hypoLen) * FastMath.sin(angle); // b2.setLocalTranslation(hypoLen / 2f * cosine, hypoLen / 2f * sine, 0); // s.attachChild(b2); // ToggleSwitchControl c = new ToggleSwitchControl(inventory, s, angle, leftPressed, numStates, initState); // stateControls.put(s, c); // return s; } private void processChainElement(Element elm) { String groupId = getUniqueId(elm.getAttribute("id")); ColorRGBA color = parseColor(elm.getAttribute("color")); Vector3f start = parseVector3(elm.getAttribute("start")); Vector3f end = parseVector3(elm.getAttribute("end")); float linkXspan = Float.parseFloat(elm.getAttribute("linkXspan")); float linkYspan = Float.parseFloat(elm.getAttribute("linkZspan")); float linkZspan = Float.parseFloat(elm.getAttribute("linkYspan")); int linkCount = Integer.parseInt(elm.getAttribute("linkCount")); float linkPadding = Float.parseFloat(elm.getAttribute("linkPadding")); float linkMass = Float.parseFloat(elm.getAttribute("linkMass")); // check if linkCount is enough to connect from start to end locations float dist = start.distance(end); if (linkCount == 0) { linkCount = (int) FastMath.ceil(dist / linkZspan); logger.log(Level.INFO, "chain " + groupId + ": linkCount=" + linkCount); } else { if ((float) linkCount * linkZspan < dist - linkZspan * .5f) { throw new IllegalArgumentException("linkCount " + linkCount + " too low to connect the start and end locations"); } } // start making a chain Vector3f vec = new Vector3f(); // temporary storage final Vector3f endNodesSize = new Vector3f(.1f, .1f, .1f); final Vector3f linkPhysicsSize = new Vector3f(linkXspan / 2, linkYspan / 2, linkZspan / 2); // rotate the z axis to the 
start->end direction // when walking on the links from start to end, z increases in each local model space Quaternion rotStartEndDir = new Quaternion(); rotStartEndDir.lookAt(start.subtract(end), Vector3f.UNIT_Y); // make start node (static) String id; id = getUniqueId(groupId + "-start"); Spatial startNode = factory.makeBlock(id, endNodesSize.x, endNodesSize.y, endNodesSize.z, ColorRGBA.White); startNode.setLocalTranslation(start); startNode.setLocalRotation(rotStartEndDir); inventory.addItem(startNode, 0); startNode.setUserData("obj_shape", "chain-endpoint"); startNode.setUserData("obj_xspan", endNodesSize.x); startNode.setUserData("obj_yspan", endNodesSize.z); startNode.setUserData("obj_zspan", endNodesSize.y); // make end node (static) id = getUniqueId(groupId + "-end"); Spatial endNode = factory.makeBlock(id, endNodesSize.x, endNodesSize.y, endNodesSize.z, ColorRGBA.White); endNode.setLocalTranslation(end); endNode.setLocalRotation(rotStartEndDir); inventory.addItem(endNode, 0); startNode.setUserData("obj_shape", "chain-endpoint"); startNode.setUserData("obj_xspan", endNodesSize.x); startNode.setUserData("obj_yspan", endNodesSize.z); startNode.setUserData("obj_zspan", endNodesSize.y); Spatial prevSpatial = startNode; Vector3f prevJointPt = new Vector3f(0, 0, -endNodesSize.z); for (int i = 0; i < linkCount; ++i) { // make a link id = getUniqueId(groupId + "-link" + i); Spatial link = factory.makeBlock(id, linkXspan, linkYspan, linkZspan + linkPadding * 2, color); link.setLocalRotation(rotStartEndDir); vec.interpolate(start, end, (i + .5f) / linkCount); link.setLocalTranslation(vec); inventory.addItem(link, linkMass, new BoxCollisionShape(linkPhysicsSize)); link.getControl(MyRigidBodyControl.class).setAngularDamping(1); link.setUserData("obj_shape", "chain-link"); link.setUserData("obj_xspan", linkXspan); link.setUserData("obj_yspan", linkZspan); link.setUserData("obj_zspan", linkYspan); link.setUserData("obj_color", color); // connect the link using a joint (or 
constraint) SixDofJoint joint = inventory.addSixDofJoint(prevSpatial, link, prevJointPt, new Vector3f(0, 0, linkZspan / 2)); joint.setCollisionBetweenLinkedBodys(false); prevSpatial = link; prevJointPt = new Vector3f(0, 0, -linkZspan / 2); } // connect the last link to the end node vec.set(0, 0, endNodesSize.z); inventory.addSixDofJoint(prevSpatial, endNode, prevJointPt, vec); } private void processLidBoxElement(Element elm) { String groupId = getUniqueId(elm.getAttribute("id")); Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = parseVector3(elm.getAttribute("rotation")); ColorRGBA color = parseColor(elm.getAttribute("color")); float xspan = Float.parseFloat(elm.getAttribute("xspan")); float yspan = Float.parseFloat(elm.getAttribute("zspan")); float zspan = Float.parseFloat(elm.getAttribute("yspan")); float thickness = Float.parseFloat(elm.getAttribute("thickness")); ColorRGBA handleColor = parseColor(elm.getAttribute("handleColor")); float handleXspan = Float.parseFloat(elm.getAttribute("handleXspan")); float handleYspan = Float.parseFloat(elm.getAttribute("handleZspan")); float handleZspan = Float.parseFloat(elm.getAttribute("handleYspan")); float handleThickness = Float.parseFloat(elm.getAttribute("handleThickness")); Transform tf = new Transform(); tf.setTranslation(location); tf.setRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); String id; id = getUniqueId(groupId + "-box"); Spatial box = factory.makeBoxContainer(id, xspan, yspan, zspan, thickness, color); box.setLocalTransform(tf); float mass = Float.parseFloat(elm.getAttribute("mass")); inventory.addItem(box, mass); box.setUserData("obj_shape", "box"); box.setUserData("obj_xspan", xspan); box.setUserData("obj_zspan", yspan); box.setUserData("obj_yspan", zspan); box.setUserData("obj_thickness", thickness); box.setUserData("color", color); id = getUniqueId(groupId + "-lid"); Node lid 
= new Node(id); Spatial lidPlate = factory.makeBlock(id + "-lidbody", xspan, thickness, zspan, color); lid.attachChild(lidPlate); Spatial lidHandle = factory.makeBoxContainer(id + "-lidhandle", handleXspan, handleYspan, handleZspan, handleThickness, handleColor); lidHandle.setLocalTranslation(0, thickness / 2 + handleYspan / 2, 0); lid.attachChild(lidHandle); lid.setLocalTranslation(0, yspan / 2 + thickness / 2, 0); lid.setLocalTransform(lid.getLocalTransform().combineWithParent(tf)); float lidMass = Float.parseFloat(elm.getAttribute("lidMass")); inventory.addItem(lid, lidMass); lid.setUserData("obj_shape", "lid"); lid.setUserData("obj_xspan", xspan); lid.setUserData("obj_zspan", thickness); lid.setUserData("obj_yspan", zspan); lid.setUserData("obj_handleXspan", handleXspan); lid.setUserData("obj_handleZspan", handleYspan); lid.setUserData("obj_handleYspan", handleZspan); lid.setUserData("obj_handleThickness", handleThickness); lid.setUserData("obj_color", handleColor); inventory.addSliderJoint(box, lid, new Vector3f(0, yspan / 2, 0), new Vector3f(0, -thickness / 2, 0), null, null, 0, xspan, false); // joint.setDampingDirLin(0.1f); // joint.setDampingDirAng(0.1f); // joint.setSoftnessOrthoLin(1); // joint.setSoftnessOrthoAng(1); // System.out.println(joint.getDampingDirLin()); // System.out.println(joint.getDampingDirAng()); // System.out.println(joint.getSoftnessOrthoLin()); // System.out.println(joint.getSoftnessOrthoAng()); // joint.setCollisionBetweenLinkedBodys(false); // joint.setLowerLinLimit(0); // joint.setUpperLinLimit(xspan); // joint.setDampingDirLin(.001f); // joint.setRestitutionOrthoLin(.5f); // joint.setRestitutionDirLin(0); // joint.setPoweredLinMotor(true); // joint.setMaxLinMotorForce(1); // joint.setTargetLinMotorVelocity(-1); } private void processDockElement(Element elm) { final int NUM_MODULES = 4; String groupId = getUniqueId(elm.getAttribute("id")); Vector3f location = parseVector3(elm.getAttribute("location")); Vector3f rotation = 
parseVector3(elm.getAttribute("rotation")); float xspan = Float.parseFloat(elm.getAttribute("xspan")); float yspan = Float.parseFloat(elm.getAttribute("zspan")); float zspan = Float.parseFloat(elm.getAttribute("yspan")); float xThickness = Float.parseFloat(elm.getAttribute("xthickness")); float yThickness = Float.parseFloat(elm.getAttribute("zthickness")); float zThickness = Float.parseFloat(elm.getAttribute("ythickness")); float handleXspan = Float.parseFloat(elm.getAttribute("handleXspan")); float handleYspan = Float.parseFloat(elm.getAttribute("handleZspan")); float handleZspan = Float.parseFloat(elm.getAttribute("handleYspan")); ColorRGBA color = parseColor(elm.getAttribute("color")); ColorRGBA caseColor = parseColor(elm.getAttribute("caseColor")); ColorRGBA handleColor = parseColor(elm.getAttribute("handleColor")); float mass = Float.parseFloat(elm.getAttribute("mass")); float caseMass = Float.parseFloat(elm.getAttribute("caseMass")); int[] switchStates = new int[NUM_MODULES]; int[] lightStates = new int[NUM_MODULES]; for (int i = 0; i < NUM_MODULES; ++i) { switchStates[i] = Integer.parseInt(elm.getAttribute("switchState" + (i + 1))); lightStates[i] = Integer.parseInt(elm.getAttribute("lightState" + (i + 1))); } Transform tf = new Transform(); tf.setTranslation(location); tf.setRotation(new Quaternion().fromAngles( rotation.x * FastMath.DEG_TO_RAD, rotation.y * FastMath.DEG_TO_RAD, rotation.z * FastMath.DEG_TO_RAD)); String id; // case id = getUniqueId(groupId + "-case"); Spatial caseShape = factory.makeBoxContainer(id + "-shape", yspan, xspan - xThickness, zspan, yThickness, xThickness, zThickness, caseColor); caseShape.setLocalRotation(new Quaternion().fromAngleAxis(-FastMath.HALF_PI, Vector3f.UNIT_Z)); caseShape.setLocalTranslation(-xThickness / 2, 0, 0); Node caseNode = new Node(id); caseNode.setLocalTransform(tf); caseNode.attachChild(caseShape); inventory.addItem(caseNode, caseMass); caseNode.setUserData("obj_shape", "dock-case"); 
caseNode.setUserData("obj_width", caseShape.getUserData("obj_width")); caseNode.setUserData("obj_height", caseShape.getUserData("obj_height")); caseNode.setUserData("obj_depth", caseShape.getUserData("obj_depth")); caseNode.setUserData("obj_color", caseShape.getUserData("obj_color")); caseNode.setUserData("obj_xthickness", caseShape.getUserData("obj_xthickness")); caseNode.setUserData("obj_ythickness", caseShape.getUserData("obj_ythickness")); caseNode.setUserData("obj_zthickness", caseShape.getUserData("obj_zthickness")); // (component sizes and locations) float wBase = xspan - 2 * xThickness; float hPanel = 0.15f; float hBase = yspan - 2 * yThickness - 0.5f - hPanel; float dBase = zspan - 2 * zThickness; float thSlot = 0.087f; float wHole = 1.375f; float hHole = 0.648f; float dHole = 0.625f; float wHoleToPanel = 0.5f; float wPanel = wBase * 0.4048f; float wSwitch = 1.15f; float hSwitch = 0.05f; float dSwitch = 0.45f; float wPanelToSwitch = wBase * 0.0071f; float wSwitchToIndicator = wBase * 0.0119f; float rIndicator = dSwitch / 3; float hIndicator = hSwitch; // dock Node dockNode = new Node(groupId + "-body"); dockNode.setLocalTransform(tf); Node dockOffsetNode = new Node(groupId + "-bodyOffset"); dockOffsetNode.setLocalTranslation(0, -(yspan - yThickness * 2 - hBase) / 2, 0); dockNode.attachChild(dockOffsetNode); // dock base String baseId = getUniqueId(groupId + "-dock-base"); Node base = new Node(id); // dock base back id = getUniqueId(baseId + "-baseB"); float wBaseB = wBase - (wPanel + wHoleToPanel + wHole); Spatial baseB = factory.makeBlock(id, wBaseB, hBase, dBase, color); baseB.setLocalTranslation(-wBase / 2 + wBaseB / 2, 0, 0); base.attachChild(baseB); // dock base front id = getUniqueId(baseId + "-baseF"); float wBaseF = wPanel + wHoleToPanel; Spatial baseF = factory.makeBlock(id, wBaseF, hBase, dBase, color); baseF.setLocalTranslation(wBase / 2 - wBaseF / 2, 0, 0); base.attachChild(baseF); // dock base near wall id = getUniqueId(baseId + "-baseNW"); 
float dBaseNW = (dBase - dHole * 4) * 0.25f; Spatial baseNW = factory.makeBlock(id, wHole, hBase, dBaseNW, color); baseNW.setLocalTranslation(-wBase / 2 + wBaseB + wHole / 2, 0, -dBase / 2 + dBaseNW / 2); base.attachChild(baseNW); // dock base far wall id = getUniqueId(baseId + "-baseFW"); Spatial baseFW = factory.makeBlock(id, wHole, hBase, dBaseNW, color); baseFW.setLocalTranslation(-wBase / 2 + wBaseB + wHole / 2, 0, dBase / 2 - dBaseNW / 2); base.attachChild(baseFW); // dock base divider wall 1 id = getUniqueId(baseId + "-baseDW1"); float dBaseDW = (dBase - dHole * 4) * 0.5f / 3; Spatial baseDW1 = factory.makeBlock(id, wHole, hBase, dBaseDW, color); baseDW1.setLocalTranslation(-wBase / 2 + wBaseB + wHole / 2, 0, -dBase / 2 + dBaseNW + dHole + dBaseDW / 2); base.attachChild(baseDW1); // dock base divider wall 2 id = getUniqueId(baseId + "-baseDW2"); Spatial baseDW2 = factory.makeBlock(id, wHole, hBase, dBaseDW, color); baseDW2.setLocalTranslation(-wBase / 2 + wBaseB + wHole / 2, 0, -dBase / 2 + dBaseNW + dHole * 2 + dBaseDW + dBaseDW / 2); base.attachChild(baseDW2); // dock base divider wall 3 id = getUniqueId(baseId + "-baseDW3"); Spatial baseDW3 = factory.makeBlock(id, wHole, hBase, dBaseDW, color); baseDW3.setLocalTranslation(-wBase / 2 + wBaseB + wHole / 2, 0, -dBase / 2 + dBaseNW + dHole * 3 + dBaseDW * 2 + dBaseDW / 2); base.attachChild(baseDW3); // slots float slotX = -wBase / 2 + wBaseB + wHole / 2; float slotY = hBase / 2 - (hHole + 0.5f * thSlot) / 2; float[] slotZ = new float[NUM_MODULES]; for (int i = 0; i < NUM_MODULES; ++i) { id = getUniqueId(baseId + "-slot" + (i + 1)); Spatial slot = factory.makeBoxContainer(id, wHole, hHole + thSlot, dHole, thSlot, ColorRGBA.DarkGray); slotZ[i] = -dBase / 2 + dBaseNW + dHole / 2 + (dHole + dBaseDW) * i; slot.setLocalTranslation(slotX, slotY, slotZ[i]); base.attachChild(slot); id = getUniqueId(groupId + "-assemblyPoint" + (i + 1)); Node att = new Node(id); att.setLocalTranslation(slotX, slotY - hHole / 2, 
slotZ[i]); att.setLocalRotation(new Quaternion().fromAngles(-FastMath.HALF_PI, 0, 0)); att.setUserData("assembly", "cartridgeSlot"); att.setUserData("assemblyEnd", 0); base.attachChild(att); } // annotate... dockNode.setUserData("obj_shape", "dock-body"); dockNode.setUserData("obj_color", color); dockNode.setUserData("obj_width", xspan); dockNode.setUserData("obj_height", yspan); dockNode.setUserData("obj_depth", zspan); dockNode.setUserData("obj_baseWidth", wBase); dockNode.setUserData("obj_baseHeight", hBase); dockNode.setUserData("obj_baseDepth", dBase); dockNode.setUserData("obj_slotWidth", wHole - thSlot * 2); dockNode.setUserData("obj_slotHeight", hHole); dockNode.setUserData("obj_slotDepth", dHole - thSlot * 2); for (int i = 0; i < NUM_MODULES; ++i) { dockNode.setUserData("obj_slot" + (i + 1) + "OffsetX", slotX); dockNode.setUserData("obj_slot" + (i + 1) + "OffsetY", slotZ[i]); dockNode.setUserData("obj_slot" + (i + 1) + "OffsetZ", slotY); } // panel String panelId = getUniqueId(groupId + "-dock-panel"); Node panel = new Node(id); float panelX = wBase / 2 - wPanel / 2; float panelY = hBase / 2 + hPanel / 2; panel.setLocalTranslation(panelX, panelY, 0); // panel cover id = getUniqueId(panelId + "-panelCover"); Spatial panelCover = factory.makeBlock(id, wPanel, hPanel, dBase, color); panel.attachChild(panelCover); // switches Node[] switchButton = new Node[NUM_MODULES]; float switchX = -wPanel / 2 + wPanelToSwitch + wSwitch / 2; float switchY = hPanel / 2 + hSwitch / 2; for (int i = 0; i < NUM_MODULES; ++i) { id = getUniqueId(panelId + "-switch" + (i + 1)); Node switchNode = new Node(id); switchNode.setLocalTranslation(switchX, switchY, slotZ[i]); panel.attachChild(switchNode); // (base) id = getUniqueId(panelId + "-switch" + (i + 1) + "-base"); Spatial switchBase = factory.makeBlock(id, wSwitch, hSwitch, dSwitch, ColorRGBA.White); switchNode.attachChild(switchBase); // (button) switchButton[i] = new Node(groupId + "-switch" + (i + 1)); 
switchButton[i].setLocalTranslation(0, -0.1f, 0); Spatial b1 = factory.makeBlock("b1", 0.6f, 0.3f, 0.3f, ColorRGBA.DarkGray); b1.setLocalTranslation(-0.25f, 0, 0); b1.setLocalRotation(new Quaternion().fromAngles(0, 0, -3 * FastMath.DEG_TO_RAD)); Spatial b2 = factory.makeBlock("b2", 0.6f, 0.3f, 0.3f, ColorRGBA.DarkGray); b2.setLocalTranslation(0.25f, 0, 0); b2.setLocalRotation(new Quaternion().fromAngles(0, 0, 3 * FastMath.DEG_TO_RAD)); switchButton[i].attachChild(b1); switchButton[i].attachChild(b2); switchNode.attachChild(switchButton[i]); } // annotate... dockNode.setUserData("obj_switchWidth", 1.2f); dockNode.setUserData("obj_switchDepth", 0.3f); for (int i = 0; i < NUM_MODULES; ++i) { dockNode.setUserData("obj_switch" + (i + 1) + "OffsetX", panelX + switchX); dockNode.setUserData("obj_switch" + (i + 1) + "OffsetY", slotZ[i]); dockNode.setUserData("obj_switch" + (i + 1) + "OffsetZ", panelY + switchY); } // indicator lights Node[] indicatorLights = new Node[NUM_MODULES]; float lightX = -wPanel / 2 + wPanelToSwitch + wSwitch + wSwitchToIndicator + rIndicator; float lightY = hPanel / 2 + hIndicator / 2; for (int i = 0; i < NUM_MODULES; ++i) { id = getUniqueId(panelId + "-indicator" + (i + 1)); Node indicator = new Node(id); indicator.setLocalTranslation(lightX, lightY, slotZ[i]); panel.attachChild(indicator); // (base) id = getUniqueId(panelId + "-indicator" + (i + 1) + "-base"); Spatial indicatorBase = factory.makeCylinder(id, rIndicator, hIndicator, ColorRGBA.White); Quaternion rotX90 = new Quaternion().fromAngles(FastMath.HALF_PI, 0, 0); indicatorBase.setLocalRotation(rotX90); indicator.attachChild(indicatorBase); // (LEDs) id = getUniqueId(groupId + "-light" + (i + 1)); indicatorLights[i] = new Node(id); indicator.attachChild(indicatorLights[i]); // (green LED) Spatial greenLight = factory.makeCylinder("green", rIndicator / 5, 0.005f, ColorRGBA.Green); greenLight.setLocalTranslation(rIndicator / 2, hIndicator / 2 + 0.005f / 2, 0); 
greenLight.setLocalRotation(rotX90); indicatorLights[i].attachChild(greenLight); // (red LED) Spatial redLight = factory.makeCylinder("red", rIndicator / 5, 0.01f, ColorRGBA.Red); redLight.setLocalTranslation(-rIndicator / 2, hIndicator / 2 + 0.005f / 2, 0); redLight.setLocalRotation(rotX90); indicatorLights[i].attachChild(redLight); } // annotate... dockNode.setUserData("obj_lightRadius", rIndicator); for (int i = 0; i < NUM_MODULES; ++i) { dockNode.setUserData("obj_light" + (i + 1) + "OffsetX", panelX + lightX); dockNode.setUserData("obj_light" + (i + 1) + "OffsetY", slotZ[i]); dockNode.setUserData("obj_light" + (i + 1) + "OffsetZ", panelY + lightY); } dockOffsetNode.attachChild(base); dockOffsetNode.attachChild(panel); // dock front id = getUniqueId(groupId + "-dock-front"); Spatial front = factory.makeBlock(id, xThickness, yspan, zspan, color); front.setLocalTranslation(wBase / 2 + xThickness / 2, 0, 0); dockNode.attachChild(front); // dock handle id = getUniqueId(groupId + "-dock-handle"); Spatial handle = factory.makeBlock(id, handleXspan, handleYspan, handleZspan, handleColor); float handleY = yspan * 0.3583f; float handleX = wBase / 2 + xThickness + handleXspan / 2; handle.setLocalTranslation(handleX, handleY, 0); dockNode.attachChild(handle); dockNode.setUserData("obj_handleWidth", handleXspan); dockNode.setUserData("obj_handleHeight", handleYspan); dockNode.setUserData("obj_handleDepth", handleZspan); dockNode.setUserData("obj_handleOffsetX", handleX); dockNode.setUserData("obj_handleOffsetY", 0); dockNode.setUserData("obj_handleOffsetZ", handleY); dockNode.setUserData("obj_handleColor", handleColor); inventory.addItem(dockNode, mass); // for (int i = 0; i < NUM_MODULES; ++i) { // // LED function // IndicatorSetControl ilFunc = new IndicatorSetControl(inventory, indicatorLights[i]); // inventory.registerStateControl(indicatorLights[i], ilFunc); // // switch function // ToggleSwitchControl sFunc = new ToggleSwitchControl(inventory, switchButton[i], 
ilFunc); // inventory.registerStateControl(switchButton[i], sFunc); // // init states // ilFunc.setState(lightStates[i]); // sFunc.setState(switchStates[i]); // sliding joint MySliderJoint joint = inventory.addSliderJoint(dockNode, caseNode, Vector3f.ZERO, Vector3f.ZERO, null, null, 0, wBase, false); joint.setDampingDirLin(1); joint.setDampingDirAng(1); joint.setSoftnessOrthoLin(1); joint.setSoftnessOrthoAng(1); } private Vector3f parseVector3(String str) { Pattern pattern = Pattern.compile("^\\s*\\((\\-?\\d*(\\.\\d+)?)\\s*,\\s*(\\-?\\d*(\\.\\d+)?)\\s*,\\s*(\\-?\\d*(\\.\\d+)?)\\)\\s*$"); Matcher m = pattern.matcher(str); if (m.find()) { float x = Float.parseFloat(m.group(1)); float y = Float.parseFloat(m.group(5)); float z = -Float.parseFloat(m.group(3)); return new Vector3f(x, y, z); } throw new IllegalArgumentException("could not parse '" + str + "'"); } private ColorRGBA parseColor(String str) { if (str.equals("black")) { return ColorRGBA.Black; } else if (str.equals("blue")) { return ColorRGBA.Blue; } else if (str.equals("brown")) { return ColorRGBA.Brown; } else if (str.equals("cyan")) { return ColorRGBA.Cyan; } else if (str.equals("darkgray")) { return ColorRGBA.DarkGray; } else if (str.equals("gray")) { return ColorRGBA.Gray; } else if (str.equals("green")) { return ColorRGBA.Green; } else if (str.equals("lightgray")) { return ColorRGBA.LightGray; } else if (str.equals("magenta")) { return ColorRGBA.Magenta; } else if (str.equals("orange")) { return ColorRGBA.Orange; } else if (str.equals("pink")) { return ColorRGBA.Pink; } else if (str.equals("red")) { return ColorRGBA.Red; } else if (str.equals("white")) { return ColorRGBA.White; } else if (str.equals("yellow")) { return ColorRGBA.Yellow; } else { Pattern pattern = Pattern.compile("^\\s*#([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})\\s*$"); Matcher m = pattern.matcher(str); if (m.find()) { int r = Integer.parseInt(m.group(1), 16); int g = Integer.parseInt(m.group(2), 16); int b = 
Integer.parseInt(m.group(3), 16); ColorRGBA color = new ColorRGBA(); color.fromIntRGBA((r << 24) + (g << 16) + (b << 8) + 0xff); return color; } throw new IllegalArgumentException("could not parse '" + str + "'"); } } public void dropRandomBlock() { final ColorRGBA[] colors = new ColorRGBA[] { ColorRGBA.Red, ColorRGBA.Blue, ColorRGBA.Yellow, ColorRGBA.Green, ColorRGBA.Brown, ColorRGBA.Cyan, ColorRGBA.Magenta, ColorRGBA.Orange }; Spatial s = factory.makeBlock(getUniqueId("largeblock"), 1.5f, 1.5f, 1.5f, colors[random.nextInt(colors.length)]); s.setLocalTranslation( (random.nextFloat() * 2 - 1) * (tableWidth / 2), 10, (random.nextFloat() * 2 - 1) * (tableDepth / 2)); s.setLocalRotation(new Quaternion().fromAngleAxis( FastMath.HALF_PI * random.nextFloat(), Vector3f.UNIT_XYZ)); inventory.addItem(s, 1); } public void dropRandomStackOfBlocks(int blockCount) { final Vector3f BOX_SIZE = new Vector3f(1, 1, 1); final ColorRGBA[] colors = new ColorRGBA[] { ColorRGBA.Red, ColorRGBA.Blue, ColorRGBA.Yellow, ColorRGBA.Green, ColorRGBA.Brown, ColorRGBA.Cyan, ColorRGBA.Magenta, ColorRGBA.Orange}; Vector3f pos = new Vector3f( (random.nextFloat() * 2 - 1) * (tableWidth / 2), BOX_SIZE.y / 2, (random.nextFloat() * 2 - 1) * (tableDepth / 2)); Quaternion rot = new Quaternion().fromAngleAxis( FastMath.HALF_PI * random.nextFloat(), Vector3f.UNIT_Y); for (int i = 0; i < blockCount; ++i) { Spatial s = factory.makeBlock(getUniqueId("smallblock"), BOX_SIZE.x, BOX_SIZE.y, BOX_SIZE.z, colors[random.nextInt(colors.length)]); s.setLocalTranslation(pos); s.setLocalRotation(rot); inventory.addItem(s, 1); pos.y += BOX_SIZE.y; } } public void dropRandomBoxContainer() { Spatial boxContainer = factory.makeBoxContainer(getUniqueId("container"), 5, 3, 5, 0.5f, ColorRGBA.Gray); boxContainer.setLocalTranslation((random.nextFloat() * 2 - 1) * (tableWidth / 2), 10, (random.nextFloat() * 2 - 1) * (tableDepth / 2)); boxContainer.setLocalRotation(new Quaternion().fromAngleAxis( FastMath.HALF_PI * 
random.nextFloat(), Vector3f.UNIT_XYZ)); inventory.addItem(boxContainer, 3); } private boolean isValidId(String id) { return id != null && !id.isEmpty() && !uniqueIds.contains(id); } private String getUniqueId(String prefix) { if (prefix == null || prefix.isEmpty()) { prefix = "obj"; } String id = prefix; while (!isValidId(id)) { id = prefix + "#" + (idSN++); } uniqueIds.add(id); return id; } public void initKeys(InputManager inputManager) { inputManager.addMapping(name + "MakeBlock", new KeyTrigger(KeyInput.KEY_B)); inputManager.addMapping(name + "MakeStack", new KeyTrigger(KeyInput.KEY_N)); inputManager.addMapping(name + "ClearTable", new KeyTrigger(KeyInput.KEY_C)); inputManager.addListener(this, name + "MakeBlock"); inputManager.addListener(this, name + "MakeStack"); inputManager.addListener(this, name + "ClearTable"); } @Override public void onAction(String eName, boolean isPressed, float tpf) { if (!enabled) { return; } if (eName.equals(name + "MakeBlock")) { if (!isPressed) { dropRandomBlock(); } } else if (eName.equals(name + "MakeStack")) { if (!isPressed) { dropRandomStackOfBlocks(5); } } else if (eName.equals(name + "ClearTable")) { if (!isPressed) { inventory.removeAllFreeItems(); } } } }
package org.hisp.dhis.mapping;

import org.hisp.dhis.common.AnalyticalObjectStore;
import org.hisp.dhis.common.GenericAnalyticalObjectService;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.indicator.IndicatorService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.RelativePeriods;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;

/**
 * Default {@link MappingService} implementation. Thin transactional facade
 * that delegates CRUD for {@link Map}, {@link MapView} and
 * {@link ExternalMapLayer} to their respective stores.
 *
 * @author Jan Henrik Overland
 */
@Transactional
public class DefaultMappingService
    extends GenericAnalyticalObjectService<MapView>
    implements MappingService
{
    // -------------------------------------------------------------------------
    // Dependencies (setter injection)
    // -------------------------------------------------------------------------

    private MapStore mapStore;

    public void setMapStore( MapStore mapStore )
    {
        this.mapStore = mapStore;
    }

    private MapViewStore mapViewStore;

    public void setMapViewStore( MapViewStore mapViewStore )
    {
        this.mapViewStore = mapViewStore;
    }

    private ExternalMapLayerStore externalMapLayerStore;

    public void setExternalMapLayerStore( ExternalMapLayerStore externalMapLayerStore )
    {
        this.externalMapLayerStore = externalMapLayerStore;
    }

    private OrganisationUnitService organisationUnitService;

    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }

    private IndicatorService indicatorService;

    public void setIndicatorService( IndicatorService indicatorService )
    {
        this.indicatorService = indicatorService;
    }

    private PeriodService periodService;

    public void setPeriodService( PeriodService periodService )
    {
        this.periodService = periodService;
    }

    // -------------------------------------------------------------------------
    // MappingService implementation
    // -------------------------------------------------------------------------

    @Override
    protected AnalyticalObjectStore<MapView> getAnalyticalObjectStore()
    {
        return mapViewStore;
    }

    // -------------------------------------------------------------------------
    // Map
    // -------------------------------------------------------------------------

    /**
     * Saves the given map, first invoking {@code setAutoFields()} on every
     * contained map view.
     *
     * @param map the map to save.
     * @return the identifier of the persisted map.
     */
    @Override
    public int addMap( Map map )
    {
        // Method reference instead of a trivial lambda.
        map.getMapViews().forEach( MapView::setAutoFields );

        return mapStore.save( map );
    }

    @Override
    public void updateMap( Map map )
    {
        mapStore.update( map );
    }

    @Override
    public Map getMap( int id )
    {
        return mapStore.get( id );
    }

    @Override
    public Map getMap( String uid )
    {
        return mapStore.getByUid( uid );
    }

    /** Fetches the map by UID bypassing access control (ACL) checks. */
    @Override
    public Map getMapNoAcl( String uid )
    {
        return mapStore.getByUidNoAcl( uid );
    }

    @Override
    public void deleteMap( Map map )
    {
        mapStore.delete( map );
    }

    @Override
    public List<Map> getAllMaps()
    {
        return mapStore.getAll();
    }

    /**
     * Returns maps whose name matches the given string, paged.
     *
     * @param name the name filter.
     * @param first offset of the first result.
     * @param max maximum number of results.
     */
    @Override
    public List<Map> getMapsBetweenLikeName( String name, int first, int max )
    {
        return mapStore.getAllLikeName( name, first, max );
    }

    // -------------------------------------------------------------------------
    // MapView
    // -------------------------------------------------------------------------

    @Override
    public int addMapView( MapView mapView )
    {
        return mapViewStore.save( mapView );
    }

    @Override
    public void updateMapView( MapView mapView )
    {
        mapViewStore.update( mapView );
    }

    @Override
    public void deleteMapView( MapView mapView )
    {
        mapViewStore.delete( mapView );
    }

    @Override
    public MapView getMapView( int id )
    {
        return mapViewStore.get( id );
    }

    @Override
    public MapView getMapView( String uid )
    {
        // Redundant local removed; delegate directly.
        return mapViewStore.getByUid( uid );
    }

    @Override
    public MapView getMapViewByName( String name )
    {
        return mapViewStore.getByName( name );
    }

    /**
     * Builds a transient (not persisted) map view for the given indicator over
     * the relative period "this year", scoped to the given organisation unit
     * and level. The view is named after the indicator.
     *
     * @param indicatorUid UID of the indicator to visualize.
     * @param organisationUnitUid UID of the root organisation unit.
     * @param level organisation unit level to include.
     * @return a new, unsaved {@link MapView}.
     */
    @Override
    public MapView getIndicatorLastYearMapView( String indicatorUid, String organisationUnitUid, int level )
    {
        MapView mapView = new MapView();

        // "This year" resolves to exactly one relative period.
        Period period = periodService.reloadPeriod(
            new RelativePeriods().setThisYear( true ).getRelativePeriods().iterator().next() );

        // NOTE(review): a NullPointerException is thrown below if the indicator
        // UID does not resolve — presumably callers guarantee a valid UID; verify.
        Indicator indicator = indicatorService.getIndicator( indicatorUid );
        OrganisationUnit unit = organisationUnitService.getOrganisationUnit( organisationUnitUid );

        mapView.addDataDimensionItem( indicator );
        mapView.getPeriods().add( period );
        mapView.getOrganisationUnits().add( unit );
        mapView.getOrganisationUnitLevels().add( level );
        mapView.setName( indicator.getName() );

        return mapView;
    }

    @Override
    public List<MapView> getMapViewsByOrganisationUnitGroupSet( OrganisationUnitGroupSet groupSet )
    {
        return mapViewStore.getByOrganisationUnitGroupSet( groupSet );
    }

    @Override
    public List<MapView> getAllMapViews()
    {
        return mapViewStore.getAll();
    }

    @Override
    public List<MapView> getMapViewsBetweenByName( String name, int first, int max )
    {
        return mapViewStore.getAllLikeName( name, first, max );
    }

    /** Returns the number of maps that contain the given map view. */
    @Override
    public int countMapViewMaps( MapView mapView )
    {
        return mapStore.countMapViewMaps( mapView );
    }

    // -------------------------------------------------------------------------
    // ExternalMapLayer
    // -------------------------------------------------------------------------

    @Override
    public int addExternalMapLayer( ExternalMapLayer externalMapLayer )
    {
        return externalMapLayerStore.save( externalMapLayer );
    }

    @Override
    public void updateExternalMapLayer( ExternalMapLayer externalMapLayer )
    {
        externalMapLayerStore.update( externalMapLayer );
    }

    @Override
    public void deleteExternalMapLayer( ExternalMapLayer externalMapLayer )
    {
        externalMapLayerStore.delete( externalMapLayer );
    }

    @Override
    public ExternalMapLayer getExternalMapLayer( int id )
    {
        return externalMapLayerStore.get( id );
    }

    @Override
    public ExternalMapLayer getExternalMapLayer( String uid )
    {
        return externalMapLayerStore.getByUid( uid );
    }

    @Override
    public ExternalMapLayer getExternalMapLayerByName( String name )
    {
        return externalMapLayerStore.getByName( name );
    }

    @Override
    public List<ExternalMapLayer> getAllExternalMapLayers()
    {
        return externalMapLayerStore.getAll();
    }
}
package org.cactoos.map;

import java.util.Map;
import org.cactoos.func.FuncOf;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.hamcrest.core.IsEqual;
import org.hamcrest.core.IsNot;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.llorllale.cactoos.matchers.MatcherOf;

/**
 * Test case for {@link MapEnvelope}.
 *
 * <p>Exercises the read-only contract (all mutators throw), the query
 * methods, and the {@code equals}/{@code hashCode} contract.
 *
 * @since 0.4
 * @checkstyle JavadocMethodCheck (500 lines)
 * @checkstyle ClassDataAbstractionCouplingCheck (500 lines)
 * @checkstyle DiamondOperatorCheck (500 lines)
 */
@SuppressWarnings("PMD.TooManyMethods")
public final class MapEnvelopeTest {

    /**
     * A rule for handling an exception.
     */
    @Rule
    public final ExpectedException exception = ExpectedException.none();

    @Test
    public void putThrowsException() {
        this.exception.expect(UnsupportedOperationException.class);
        this.exception.expectMessage(
            "#put() is not supported, it's a read-only map"
        );
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(
                new MapEntry<>(0, -1)
            )
        );
        MatcherAssert.assertThat(
            "put method did not throw exception",
            target,
            new MatcherOf<>(new FuncOf<>(m -> m.put(2, 2)))
        );
    }

    @Test
    public void removeThrowsException() {
        this.exception.expect(UnsupportedOperationException.class);
        this.exception.expectMessage(
            "#remove() is not supported, it's a read-only map"
        );
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(
                new MapEntry<>(0, -1)
            )
        );
        MatcherAssert.assertThat(
            "remove method did not throw exception",
            target,
            new MatcherOf<>(new FuncOf<>(m -> m.remove(0)))
        );
    }

    @Test
    public void putAllThrowsException() {
        this.exception.expect(UnsupportedOperationException.class);
        this.exception.expectMessage(
            "#putAll() is not supported, it's a read-only map"
        );
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(
                new MapEntry<>(0, -1)
            )
        );
        MatcherAssert.assertThat(
            "putAll method did not throw exception",
            target,
            new MatcherOf<>(new FuncOf<>(m -> m.putAll(new MapOf<Integer, Integer>())))
        );
    }

    @Test
    public void clearThrowsException() {
        this.exception.expect(UnsupportedOperationException.class);
        this.exception.expectMessage(
            "#clear() is not supported, it's a read-only map"
        );
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(
                new MapEntry<>(0, -1)
            )
        );
        MatcherAssert.assertThat(
            "clear method did not throw exception",
            target,
            new MatcherOf<>(new FuncOf<>(Map::clear))
        );
    }

    @Test
    public void mapIsEmptyTrue() {
        final MapNoNulls<Integer, Integer> empty = new MapNoNulls<>(
            new MapOf<Integer, Integer>()
        );
        MatcherAssert.assertThat(
            "#isEmpty() returns false for empty map",
            empty.isEmpty(),
            Matchers.is(true)
        );
    }

    @Test
    public void mapIsEmptyFalse() {
        final MapNoNulls<Integer, Integer> populated = new MapNoNulls<>(
            new MapOf<Integer, Integer>(new MapEntry<>(1, 0))
        );
        MatcherAssert.assertThat(
            "#isEmpty() returns true for not empty map",
            populated.isEmpty(),
            Matchers.is(false)
        );
    }

    @Test
    public void mapContainsKeyTrue() {
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(new MapEntry<>(1, 0))
        );
        MatcherAssert.assertThat(
            "contains key returns false with exist key",
            target.containsKey(1),
            Matchers.is(true)
        );
    }

    @Test
    public void mapContainsKeyFalse() {
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(new MapEntry<>(1, 0))
        );
        MatcherAssert.assertThat(
            "contains key returns true with absent key",
            target.containsKey(0),
            Matchers.is(false)
        );
    }

    @Test
    public void mapContainsValueTrue() {
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(new MapEntry<>(1, 0))
        );
        MatcherAssert.assertThat(
            "contains value returns false with exist value",
            target.containsValue(0),
            Matchers.is(true)
        );
    }

    @Test
    public void mapContainsValueFalse() {
        final MapNoNulls<Integer, Integer> target = new MapNoNulls<>(
            new MapOf<Integer, Integer>(new MapEntry<>(1, 0))
        );
        MatcherAssert.assertThat(
            "contains value returns true with absent value",
            target.containsValue(1),
            Matchers.is(false)
        );
    }

    @Test
    public void mapEqualsToItself() {
        final MapOf<String, String> subject =
            new MapOf<String, String>(new MapEntry<>("key", "value"));
        MatcherAssert.assertThat(
            "Map doesn't equal to itself",
            subject,
            new IsEqual<>(subject)
        );
    }

    @Test
    public void mapNotEqualsToAnotherClass() {
        final MapOf<String, String> subject =
            new MapOf<String, String>(new MapEntry<>("key1", "value1"));
        MatcherAssert.assertThat(
            "Map equals to an instance of another type",
            subject,
            new IsNot<>(new IsEqual<>("Totally different type"))
        );
    }

    @Test
    public void mapEqualsToMapWithSameEntries() {
        final String key = "key2";
        final String value = "value2";
        final MapEntry<String, String> actual = new MapEntry<>(key, value);
        final MapEntry<String, String> wanted = new MapEntry<>(key, value);
        MatcherAssert.assertThat(
            "Map doesn't equal to another map with same entries",
            new MapOf<String, String>(actual),
            new IsEqual<>(new MapOf<String, String>(wanted))
        );
    }

    @Test(expected = NullPointerException.class)
    public void equalFailsOnNull() {
        final MapEntry<String, String> fine = new MapEntry<>("key3", "value3");
        final MapEntry<String, String> broken = new MapEntry<>("key4", null);
        MatcherAssert.assertThat(
            "Map allows null values, but shouldn't",
            new MapOf<String, String>(fine, broken),
            new IsEqual<>(new MapOf<String, String>(fine, broken))
        );
    }

    @Test
    public void mapNotEqualsToOtherWithDifferentKeys() {
        final String value = "value5";
        MatcherAssert.assertThat(
            "Map equals to another map with different keys",
            new MapOf<String, String>(new MapEntry<>("key5", value)),
            new IsNot<>(
                new IsEqual<>(
                    new MapOf<String, String>(new MapEntry<>("key6", value))
                )
            )
        );
    }

    @Test
    public void mapNotEqualsToOtherWithDifferentValues() {
        final String key = "key7";
        MatcherAssert.assertThat(
            "Map equals to another map with different values",
            new MapOf<String, String>(new MapEntry<>(key, "value7")),
            new IsNot<>(
                new IsEqual<>(
                    new MapOf<String, String>(new MapEntry<>(key, "value8"))
                )
            )
        );
    }

    @Test
    public void hashCodeDependsOnItems() {
        final String key = "key9";
        final String value = "value9";
        final MapEntry<String, String> actual = new MapEntry<>(key, value);
        final MapEntry<String, String> wanted = new MapEntry<>(key, value);
        MatcherAssert.assertThat(
            "hashCode returns different results for same entries",
            new MapOf<String, String>(actual).hashCode(),
            new IsEqual<>(new MapOf<String, String>(wanted).hashCode())
        );
    }

    @Test(expected = NullPointerException.class)
    public void hashCodeFailsOnNull() {
        final MapEntry<String, String> fine = new MapEntry<>("key10", "value10");
        final MapEntry<String, String> broken = new MapEntry<>("key11", null);
        new MapOf<String, String>(fine, broken).hashCode();
    }
}
package org.hisp.dhis.webapi.controller.event; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.ByteSource; import com.vividsolutions.jts.io.ParseException; import org.apache.commons.io.IOUtils; import org.hisp.dhis.category.CategoryOptionCombo; import org.hisp.dhis.common.AssignedUserSelectionMode; import org.hisp.dhis.common.DhisApiVersion; import org.hisp.dhis.common.Grid; import org.hisp.dhis.common.IdSchemes; import org.hisp.dhis.common.OrganisationUnitSelectionMode; import org.hisp.dhis.common.PagerUtils; import org.hisp.dhis.common.cache.CacheStrategy; import org.hisp.dhis.commons.util.StreamUtils; import org.hisp.dhis.commons.util.TextUtils; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementService; import org.hisp.dhis.dxf2.common.ImportOptions; import org.hisp.dhis.dxf2.common.OrderParams; import org.hisp.dhis.dxf2.events.event.DataValue; import org.hisp.dhis.dxf2.events.event.Event; import org.hisp.dhis.dxf2.events.event.EventSearchParams; import org.hisp.dhis.dxf2.events.event.EventService; import org.hisp.dhis.dxf2.events.event.Events; import org.hisp.dhis.dxf2.events.event.ImportEventsTask; import org.hisp.dhis.dxf2.events.event.csv.CsvEventService; import org.hisp.dhis.dxf2.events.report.EventRowService; import org.hisp.dhis.dxf2.events.report.EventRows; import org.hisp.dhis.dxf2.events.trackedentity.TrackedEntityInstanceService; import org.hisp.dhis.dxf2.importsummary.ImportStatus; import org.hisp.dhis.dxf2.importsummary.ImportSummaries; import org.hisp.dhis.dxf2.importsummary.ImportSummary; import org.hisp.dhis.dxf2.utils.InputUtils; import org.hisp.dhis.dxf2.webmessage.WebMessage; import org.hisp.dhis.dxf2.webmessage.WebMessageException; import org.hisp.dhis.dxf2.webmessage.WebMessageUtils; import org.hisp.dhis.dxf2.webmessage.responses.FileResourceWebMessageResponse; import org.hisp.dhis.event.EventStatus; import 
org.hisp.dhis.fieldfilter.FieldFilterParams; import org.hisp.dhis.fieldfilter.FieldFilterService; import org.hisp.dhis.fileresource.FileResource; import org.hisp.dhis.fileresource.FileResourceDomain; import org.hisp.dhis.fileresource.FileResourceService; import org.hisp.dhis.fileresource.FileResourceStorageStatus; import org.hisp.dhis.importexport.ImportStrategy; import org.hisp.dhis.node.NodeUtils; import org.hisp.dhis.node.Preset; import org.hisp.dhis.node.types.RootNode; import org.hisp.dhis.program.Program; import org.hisp.dhis.program.ProgramStageInstanceService; import org.hisp.dhis.program.ProgramStatus; import org.hisp.dhis.query.Order; import org.hisp.dhis.render.RenderService; import org.hisp.dhis.scheduling.JobConfiguration; import org.hisp.dhis.scheduling.SchedulingManager; import org.hisp.dhis.schema.Schema; import org.hisp.dhis.schema.SchemaService; import org.hisp.dhis.system.grid.GridUtils; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.webapi.mvc.annotation.ApiVersion; import org.hisp.dhis.webapi.service.ContextService; import org.hisp.dhis.webapi.service.WebMessageService; import org.hisp.dhis.webapi.utils.ContextUtils; import org.hisp.dhis.webapi.utils.FileResourceUtils; import org.hisp.dhis.webapi.webdomain.WebOptions; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpHeaders; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import 
java.io.OutputStream; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.zip.GZIPOutputStream; import static org.hisp.dhis.dxf2.webmessage.WebMessageUtils.jobConfigurationReport; import static org.hisp.dhis.scheduling.JobType.EVENT_IMPORT; /** * @author Morten Olav Hansen <mortenoh@gmail.com> */ @Controller @RequestMapping( value = EventController.RESOURCE_PATH ) @ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } ) public class EventController { public static final String RESOURCE_PATH = "/events"; private static final String META_DATA_KEY_DE = "de"; // Dependencies @Autowired private CurrentUserService currentUserService; @Autowired private SchedulingManager schedulingManager; @Autowired private EventService eventService; @Autowired private CsvEventService csvEventService; @Autowired private EventRowService eventRowService; @Autowired private DataElementService dataElementService; @Autowired private WebMessageService webMessageService; @Autowired private InputUtils inputUtils; @Autowired private RenderService renderService; @Autowired private ProgramStageInstanceService programStageInstanceService; @Autowired private FileResourceService fileResourceService; @Autowired private FieldFilterService fieldFilterService; @Autowired private ContextService contextService; @Autowired private SchemaService schemaService; @Autowired protected TrackedEntityInstanceService entityInstanceService; @Autowired private ContextUtils contextUtils; private Schema schema; protected Schema getSchema() { if ( schema == null ) { schema = schemaService.getDynamicSchema( Event.class ); } return schema; } // Read // Query Read @RequestMapping( value = "/query", method = RequestMethod.GET, produces = { ContextUtils.CONTENT_TYPE_JSON, ContextUtils.CONTENT_TYPE_JAVASCRIPT } ) public @ResponseBody Grid queryEventsJson( @RequestParam( 
required = false ) String program, @RequestParam( required = false ) String programStage, @RequestParam( required = false ) ProgramStatus programStatus, @RequestParam( required = false ) Boolean followUp, @RequestParam( required = false ) String trackedEntityInstance, @RequestParam( required = false ) String orgUnit, @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode, @RequestParam( required = false ) AssignedUserSelectionMode assignedUserMode, @RequestParam( required = false ) String assignedUser, @RequestParam( required = false ) Date startDate, @RequestParam( required = false ) Date endDate, @RequestParam( required = false ) Date dueDateStart, @RequestParam( required = false ) Date dueDateEnd, @RequestParam( required = false ) Date lastUpdated, @RequestParam( required = false ) Date lastUpdatedStartDate, @RequestParam( required = false ) Date lastUpdatedEndDate, @RequestParam( required = false ) EventStatus status, @RequestParam( required = false ) String attributeCc, @RequestParam( required = false ) String attributeCos, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) Integer page, @RequestParam( required = false ) Integer pageSize, @RequestParam( required = false ) boolean totalPages, @RequestParam( required = false ) Boolean skipPaging, @RequestParam( required = false ) Boolean paging, @RequestParam( required = false ) String order, @RequestParam( required = false ) String attachment, @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted, @RequestParam( required = false ) String event, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) Set<String> dataElement, @RequestParam( required = false, defaultValue = "false" ) boolean includeAllDataElements, @RequestParam Map<String, String> parameters, IdSchemes idSchemes, Model model, HttpServletResponse response, HttpServletRequest request ) throws WebMessageException { List<String> fields = 
Lists.newArrayList( contextService.getParameterValues( "fields" ) ); if ( fields.isEmpty() ) { fields.addAll( Preset.ALL.getFields() ); } CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, false ); if ( attributeOptionCombo == null ) { throw new WebMessageException( WebMessageUtils.conflict( "Illegal attribute option combo identifier: " + attributeCc + " " + attributeCos ) ); } Set<String> eventIds = TextUtils.splitToArray( event, TextUtils.SEMICOLON ); Set<String> assignedUserIds = TextUtils.splitToArray( assignedUser, TextUtils.SEMICOLON ); lastUpdatedStartDate = lastUpdatedStartDate != null ? lastUpdatedStartDate : lastUpdated; skipPaging = PagerUtils.isSkipPaging( skipPaging, paging ); EventSearchParams params = eventService.getFromUrl( program, programStage, programStatus, followUp, orgUnit, ouMode, trackedEntityInstance, startDate, endDate, dueDateStart, dueDateEnd, lastUpdatedStartDate, lastUpdatedEndDate, null, status, attributeOptionCombo, idSchemes, page, pageSize, totalPages, skipPaging, null, getGridOrderParams( order ), false, eventIds, assignedUserMode, assignedUserIds, filter, dataElement, includeAllDataElements, includeDeleted ); contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_JSON, CacheStrategy.NO_CACHE ); return eventService.getEventsGrid( params ); } @RequestMapping( value = "/query", method = RequestMethod.GET, produces = ContextUtils.CONTENT_TYPE_XML ) public void queryEventsXml( @RequestParam( required = false ) String program, @RequestParam( required = false ) String programStage, @RequestParam( required = false ) ProgramStatus programStatus, @RequestParam( required = false ) Boolean followUp, @RequestParam( required = false ) String trackedEntityInstance, @RequestParam( required = false ) String orgUnit, @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode, @RequestParam( required = false ) AssignedUserSelectionMode assignedUserMode, @RequestParam( 
required = false ) String assignedUser, @RequestParam( required = false ) Date startDate, @RequestParam( required = false ) Date endDate, @RequestParam( required = false ) Date dueDateStart, @RequestParam( required = false ) Date dueDateEnd, @RequestParam( required = false ) Date lastUpdated, @RequestParam( required = false ) Date lastUpdatedStartDate, @RequestParam( required = false ) Date lastUpdatedEndDate, @RequestParam( required = false ) EventStatus status, @RequestParam( required = false ) String attributeCc, @RequestParam( required = false ) String attributeCos, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) Integer page, @RequestParam( required = false ) Integer pageSize, @RequestParam( required = false ) boolean totalPages, @RequestParam( required = false ) Boolean skipPaging, @RequestParam( required = false ) Boolean paging, @RequestParam( required = false ) String order, @RequestParam( required = false ) String attachment, @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted, @RequestParam( required = false ) String event, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) Set<String> dataElement, @RequestParam( required = false, defaultValue = "false" ) boolean includeAllDataElements, @RequestParam Map<String, String> parameters, IdSchemes idSchemes, Model model, HttpServletResponse response, HttpServletRequest request ) throws Exception { List<String> fields = Lists.newArrayList( contextService.getParameterValues( "fields" ) ); if ( fields.isEmpty() ) { fields.addAll( Preset.ALL.getFields() ); } CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, false ); if ( attributeOptionCombo == null ) { throw new WebMessageException( WebMessageUtils.conflict( "Illegal attribute option combo identifier: " + attributeCc + " " + attributeCos ) ); } Set<String> eventIds = TextUtils.splitToArray( event, 
TextUtils.SEMICOLON ); Set<String> assignedUserIds = TextUtils.splitToArray( assignedUser, TextUtils.SEMICOLON ); lastUpdatedStartDate = lastUpdatedStartDate != null ? lastUpdatedStartDate : lastUpdated; skipPaging = PagerUtils.isSkipPaging( skipPaging, paging ); EventSearchParams params = eventService.getFromUrl( program, programStage, programStatus, followUp, orgUnit, ouMode, trackedEntityInstance, startDate, endDate, dueDateStart, dueDateEnd, lastUpdatedStartDate, lastUpdatedEndDate, null, status, attributeOptionCombo, idSchemes, page, pageSize, totalPages, skipPaging,null, getGridOrderParams( order ), false, eventIds, assignedUserMode, assignedUserIds, filter, dataElement, includeAllDataElements, includeDeleted ); contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_XML, CacheStrategy.NO_CACHE ); Grid grid = eventService.getEventsGrid( params ); GridUtils.toXml( grid, response.getOutputStream() ); } @RequestMapping( value = "/query", method = RequestMethod.GET, produces = ContextUtils.CONTENT_TYPE_EXCEL ) public void queryEventsXls( @RequestParam( required = false ) String program, @RequestParam( required = false ) String programStage, @RequestParam( required = false ) ProgramStatus programStatus, @RequestParam( required = false ) Boolean followUp, @RequestParam( required = false ) String trackedEntityInstance, @RequestParam( required = false ) String orgUnit, @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode, @RequestParam( required = false ) AssignedUserSelectionMode assignedUserMode, @RequestParam( required = false ) String assignedUser, @RequestParam( required = false ) Date startDate, @RequestParam( required = false ) Date endDate, @RequestParam( required = false ) Date dueDateStart, @RequestParam( required = false ) Date dueDateEnd, @RequestParam( required = false ) Date lastUpdated, @RequestParam( required = false ) Date lastUpdatedStartDate, @RequestParam( required = false ) Date lastUpdatedEndDate, @RequestParam( 
required = false ) EventStatus status, @RequestParam( required = false ) String attributeCc, @RequestParam( required = false ) String attributeCos, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) Integer page, @RequestParam( required = false ) Integer pageSize, @RequestParam( required = false ) boolean totalPages, @RequestParam( required = false ) Boolean skipPaging, @RequestParam( required = false ) Boolean paging, @RequestParam( required = false ) String order, @RequestParam( required = false ) String attachment, @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted, @RequestParam( required = false ) String event, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) Set<String> dataElement, @RequestParam( required = false, defaultValue = "false" ) boolean includeAllDataElements, @RequestParam Map<String, String> parameters, IdSchemes idSchemes, Model model, HttpServletResponse response, HttpServletRequest request ) throws Exception { List<String> fields = Lists.newArrayList( contextService.getParameterValues( "fields" ) ); if ( fields.isEmpty() ) { fields.addAll( Preset.ALL.getFields() ); } CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, false ); if ( attributeOptionCombo == null ) { throw new WebMessageException( WebMessageUtils.conflict( "Illegal attribute option combo identifier: " + attributeCc + " " + attributeCos ) ); } Set<String> eventIds = TextUtils.splitToArray( event, TextUtils.SEMICOLON ); Set<String> assignedUserIds = TextUtils.splitToArray( assignedUser, TextUtils.SEMICOLON ); lastUpdatedStartDate = lastUpdatedStartDate != null ? 
lastUpdatedStartDate : lastUpdated; skipPaging = PagerUtils.isSkipPaging( skipPaging, paging ); EventSearchParams params = eventService.getFromUrl( program, programStage, programStatus, followUp, orgUnit, ouMode, trackedEntityInstance, startDate, endDate, dueDateStart, dueDateEnd, lastUpdatedStartDate, lastUpdatedEndDate, null, status, attributeOptionCombo, idSchemes, page, pageSize, totalPages, skipPaging, null, getGridOrderParams( order ), false, eventIds, assignedUserMode, assignedUserIds, filter, dataElement, includeAllDataElements, includeDeleted ); contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_EXCEL, CacheStrategy.NO_CACHE ); Grid grid = eventService.getEventsGrid( params ); GridUtils.toXls( grid, response.getOutputStream() ); } @RequestMapping( value = "/query", method = RequestMethod.GET, produces = ContextUtils.CONTENT_TYPE_CSV ) public void queryEventsCsv( @RequestParam( required = false ) String program, @RequestParam( required = false ) String programStage, @RequestParam( required = false ) ProgramStatus programStatus, @RequestParam( required = false ) Boolean followUp, @RequestParam( required = false ) String trackedEntityInstance, @RequestParam( required = false ) String orgUnit, @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode, @RequestParam( required = false ) AssignedUserSelectionMode assignedUserMode, @RequestParam( required = false ) String assignedUser, @RequestParam( required = false ) Date startDate, @RequestParam( required = false ) Date endDate, @RequestParam( required = false ) Date dueDateStart, @RequestParam( required = false ) Date dueDateEnd, @RequestParam( required = false ) Date lastUpdated, @RequestParam( required = false ) Date lastUpdatedStartDate, @RequestParam( required = false ) Date lastUpdatedEndDate, @RequestParam( required = false ) EventStatus status, @RequestParam( required = false ) String attributeCc, @RequestParam( required = false ) String attributeCos, @RequestParam( 
required = false ) boolean skipMeta, @RequestParam( required = false ) Integer page, @RequestParam( required = false ) Integer pageSize, @RequestParam( required = false ) boolean totalPages, @RequestParam( required = false ) Boolean skipPaging, @RequestParam( required = false ) Boolean paging, @RequestParam( required = false ) String order, @RequestParam( required = false ) String attachment, @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted, @RequestParam( required = false ) String event, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) Set<String> dataElement, @RequestParam( required = false, defaultValue = "false" ) boolean includeAllDataElements, @RequestParam Map<String, String> parameters, IdSchemes idSchemes, Model model, HttpServletResponse response, HttpServletRequest request ) throws Exception { List<String> fields = Lists.newArrayList( contextService.getParameterValues( "fields" ) ); if ( fields.isEmpty() ) { fields.addAll( Preset.ALL.getFields() ); } CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, false ); if ( attributeOptionCombo == null ) { throw new WebMessageException( WebMessageUtils.conflict( "Illegal attribute option combo identifier: " + attributeCc + " " + attributeCos ) ); } Set<String> eventIds = TextUtils.splitToArray( event, TextUtils.SEMICOLON ); Set<String> assignedUserIds = TextUtils.splitToArray( assignedUser, TextUtils.SEMICOLON ); lastUpdatedStartDate = lastUpdatedStartDate != null ? 
lastUpdatedStartDate : lastUpdated;

        skipPaging = PagerUtils.isSkipPaging( skipPaging, paging );

        // Grid variant: schema orders are not used (null), only grid order params.
        EventSearchParams params = eventService.getFromUrl( program, programStage, programStatus, followUp,
            orgUnit, ouMode, trackedEntityInstance, startDate, endDate, dueDateStart, dueDateEnd,
            lastUpdatedStartDate, lastUpdatedEndDate, null, status, attributeOptionCombo, idSchemes, page,
            pageSize, totalPages, skipPaging, null, getGridOrderParams( order ), false, eventIds,
            assignedUserMode, assignedUserIds, filter, dataElement, includeAllDataElements, includeDeleted );

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_CSV, CacheStrategy.NO_CACHE );

        Grid grid = eventService.getEventsGrid( params );

        GridUtils.toCsv( grid, response.getWriter() );
    }

    // -------------------------------------------------------------------------
    // Object Read
    // -------------------------------------------------------------------------

    /**
     * Queries events matching the given filters and returns them as a
     * {@link RootNode} (rendered as JSON/XML by the framework).
     * <p>
     * Notable behavior visible in the body:
     * <ul>
     * <li>{@code lastUpdated} is used as a fallback for {@code lastUpdatedStartDate} when the latter is absent.</li>
     * <li>When {@code fields} is empty, {@code Preset.ALL} fields are used.</li>
     * <li>{@code href} links are only populated when the requested fields imply them (see {@link #hasHref(List)}).</li>
     * <li>Setting {@code attachment} adds Content-Disposition/Transfer-Encoding headers for download.</li>
     * </ul>
     *
     * @throws WebMessageException when the attribute option combo cannot be resolved.
     */
    @RequestMapping( method = RequestMethod.GET )
    public @ResponseBody RootNode getEvents(
        @RequestParam( required = false ) String program,
        @RequestParam( required = false ) String programStage,
        @RequestParam( required = false ) ProgramStatus programStatus,
        @RequestParam( required = false ) Boolean followUp,
        @RequestParam( required = false ) String trackedEntityInstance,
        @RequestParam( required = false ) String orgUnit,
        @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode,
        @RequestParam( required = false ) AssignedUserSelectionMode assignedUserMode,
        @RequestParam( required = false ) String assignedUser,
        @RequestParam( required = false ) Date startDate,
        @RequestParam( required = false ) Date endDate,
        @RequestParam( required = false ) Date dueDateStart,
        @RequestParam( required = false ) Date dueDateEnd,
        @RequestParam( required = false ) Date lastUpdated,
        @RequestParam( required = false ) Date lastUpdatedStartDate,
        @RequestParam( required = false ) Date lastUpdatedEndDate,
        @RequestParam( required = false ) String lastUpdatedDuration,
        @RequestParam( required = false ) EventStatus status,
        @RequestParam( required = false ) String attributeCc,
        @RequestParam( required = false ) String attributeCos,
        @RequestParam( required = false ) boolean skipMeta,
        @RequestParam( required = false ) Integer page,
        @RequestParam( required = false ) Integer pageSize,
        @RequestParam( required = false ) boolean totalPages,
        @RequestParam( required = false ) Boolean skipPaging,
        @RequestParam( required = false ) Boolean paging,
        @RequestParam( required = false ) String order,
        @RequestParam( required = false ) String attachment,
        @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted,
        @RequestParam( required = false ) String event,
        @RequestParam( required = false ) Set<String> filter,
        @RequestParam Map<String, String> parameters,
        IdSchemes idSchemes, Model model, HttpServletResponse response, HttpServletRequest request )
        throws WebMessageException
    {
        WebOptions options = new WebOptions( parameters );

        List<String> fields = Lists.newArrayList( contextService.getParameterValues( "fields" ) );

        if ( fields.isEmpty() )
        {
            fields.addAll( Preset.ALL.getFields() );
        }

        CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, true );

        Set<String> eventIds = TextUtils.splitToArray( event, TextUtils.SEMICOLON );

        Set<String> assignedUserIds = TextUtils.splitToArray( assignedUser, TextUtils.SEMICOLON );

        Map<String, String> dataElementOrders = getDataElementsFromOrder( order );

        // lastUpdated acts as a fallback for lastUpdatedStartDate
        lastUpdatedStartDate = lastUpdatedStartDate != null ?
            lastUpdatedStartDate : lastUpdated;

        skipPaging = PagerUtils.isSkipPaging( skipPaging, paging );

        EventSearchParams params = eventService.getFromUrl( program, programStage, programStatus, followUp,
            orgUnit, ouMode, trackedEntityInstance, startDate, endDate, dueDateStart, dueDateEnd,
            lastUpdatedStartDate, lastUpdatedEndDate, lastUpdatedDuration, status, attributeOptionCombo,
            idSchemes, page, pageSize, totalPages, skipPaging, getOrderParams( order ),
            getGridOrderParams( order, dataElementOrders ), false, eventIds, assignedUserMode,
            assignedUserIds, filter, dataElementOrders.keySet(), false, includeDeleted );

        Events events = eventService.getEvents( params );

        if ( hasHref( fields ) )
        {
            events.getEvents().forEach( e -> e.setHref(
                ContextUtils.getRootPath( request ) + RESOURCE_PATH + "/" + e.getEvent() ) );
        }

        if ( !skipMeta && params.getProgram() != null )
        {
            events.setMetaData( getMetaData( params.getProgram() ) );
        }

        model.addAttribute( "model", events );
        model.addAttribute( "viewClass", options.getViewClass( "detailed" ) );

        RootNode rootNode = NodeUtils.createMetadata();

        if ( events.getPager() != null )
        {
            rootNode.addChild( NodeUtils.createPager( events.getPager() ) );
        }

        if ( !StringUtils.isEmpty( attachment ) )
        {
            response.addHeader( ContextUtils.HEADER_CONTENT_DISPOSITION, "attachment; filename=" + attachment );
            response.addHeader( ContextUtils.HEADER_CONTENT_TRANSFER_ENCODING, "binary" );
        }

        rootNode.addChild( fieldFilterService.toCollectionNode( Event.class,
            new FieldFilterParams( events.getEvents(), fields ) ) );

        return rootNode;
    }

    /**
     * Queries events matching the given filters and streams them as CSV
     * (optionally gzip-compressed when the client accepts it).
     *
     * @throws IOException on output-stream failures.
     * @throws WebMessageException when the attribute option combo cannot be resolved.
     */
    @RequestMapping( method = RequestMethod.GET, produces = { "application/csv", "application/csv+gzip", "text/csv" } )
    public void getCsvEvents(
        @RequestParam( required = false ) String program,
        @RequestParam( required = false ) String programStage,
        @RequestParam( required = false ) ProgramStatus programStatus,
        @RequestParam( required = false ) Boolean followUp,
        @RequestParam( required = false ) String trackedEntityInstance,
        @RequestParam( required = false ) String orgUnit,
        @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode,
        @RequestParam( required = false ) AssignedUserSelectionMode assignedUserMode,
        @RequestParam( required = false ) String assignedUser,
        @RequestParam( required = false ) Date startDate,
        @RequestParam( required = false ) Date endDate,
        @RequestParam( required = false ) Date dueDateStart,
        @RequestParam( required = false ) Date dueDateEnd,
        @RequestParam( required = false ) Date lastUpdated,
        @RequestParam( required = false ) Date lastUpdatedStartDate,
        @RequestParam( required = false ) Date lastUpdatedEndDate,
        @RequestParam( required = false ) String lastUpdatedDuration,
        @RequestParam( required = false ) EventStatus status,
        @RequestParam( required = false ) String attributeCc,
        @RequestParam( required = false ) String attributeCos,
        @RequestParam( required = false ) Integer page,
        @RequestParam( required = false ) Integer pageSize,
        @RequestParam( required = false ) boolean totalPages,
        @RequestParam( required = false ) Boolean skipPaging,
        @RequestParam( required = false ) Boolean paging,
        @RequestParam( required = false ) String order,
        @RequestParam( required = false ) String event,
        @RequestParam( required = false ) Set<String> filter,
        @RequestParam( required = false ) String attachment,
        @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted,
        @RequestParam( required = false, defaultValue = "false" ) boolean skipHeader,
        IdSchemes idSchemes, HttpServletResponse response, HttpServletRequest request )
        throws IOException, WebMessageException
    {
        CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, true );

        Set<String> eventIds = TextUtils.splitToArray( event, TextUtils.SEMICOLON );

        Set<String> assignedUserIds = TextUtils.splitToArray( assignedUser, TextUtils.SEMICOLON );

        List<Order> schemaOrders = getOrderParams( order );

        Map<String, String> dataElementOrders = getDataElementsFromOrder( order );

        // lastUpdated acts as a fallback for lastUpdatedStartDate
        lastUpdatedStartDate = lastUpdatedStartDate != null ? lastUpdatedStartDate : lastUpdated;

        skipPaging = PagerUtils.isSkipPaging( skipPaging, paging );

        EventSearchParams params = eventService.getFromUrl( program, programStage, programStatus, followUp,
            orgUnit, ouMode, trackedEntityInstance, startDate, endDate, dueDateStart, dueDateEnd,
            lastUpdatedStartDate, lastUpdatedEndDate, lastUpdatedDuration, status, attributeOptionCombo,
            idSchemes, page, pageSize, totalPages, skipPaging, schemaOrders,
            getGridOrderParams( order, dataElementOrders ), false, eventIds, assignedUserMode,
            assignedUserIds, filter, dataElementOrders.keySet(), false, includeDeleted );

        Events events = eventService.getEvents( params );

        OutputStream outputStream = response.getOutputStream();
        response.setContentType( "application/csv" );

        if ( ContextUtils.isAcceptCsvGzip( request ) )
        {
            response.addHeader( ContextUtils.HEADER_CONTENT_TRANSFER_ENCODING, "binary" );
            outputStream = new GZIPOutputStream( outputStream );
            response.setContentType( "application/csv+gzip" );
        }

        if ( !StringUtils.isEmpty( attachment ) )
        {
            // NOTE(review): literal header name here vs. ContextUtils.HEADER_CONTENT_DISPOSITION
            // elsewhere in this controller — presumably equivalent, confirm the constant's value.
            response.addHeader( "Content-Disposition", "attachment; filename=" + attachment );
        }

        csvEventService.writeEvents( outputStream, events, !skipHeader );
    }

    // -------------------------------------------------------------------------
    // Rows Read
    // -------------------------------------------------------------------------

    /**
     * Returns event rows for the given query; most {@code getFromUrl} arguments
     * that do not apply to the row view are passed as {@code null}.
     */
    @RequestMapping( value = "/eventRows", method = RequestMethod.GET )
    public @ResponseBody EventRows getEventRows(
        @RequestParam( required = false ) String program,
        @RequestParam( required = false ) String orgUnit,
        @RequestParam( required = false ) OrganisationUnitSelectionMode ouMode,
        @RequestParam( required = false ) ProgramStatus programStatus,
        @RequestParam( required = false ) EventStatus eventStatus,
        @RequestParam( required = false ) Date startDate,
        @RequestParam( required = false ) Date endDate,
        @RequestParam( required = false ) String attributeCc,
        @RequestParam( required = false ) String attributeCos,
        @RequestParam( required = false ) boolean totalPages,
        @RequestParam( required = false ) Boolean skipPaging,
        @RequestParam( required = false ) Boolean paging,
        @RequestParam( required = false ) String order,
        @RequestParam( required = false, defaultValue = "false" ) boolean includeDeleted,
        @RequestParam Map<String, String> parameters, Model model )
        throws WebMessageException
    {
        CategoryOptionCombo attributeOptionCombo = inputUtils.getAttributeOptionCombo( attributeCc, attributeCos, true );

        skipPaging = PagerUtils.isSkipPaging( skipPaging, paging );

        EventSearchParams params = eventService.getFromUrl( program, null, programStatus, null, orgUnit,
            ouMode, null, startDate, endDate, null, null, null, null, null, eventStatus,
            attributeOptionCombo, null, null, null, totalPages, skipPaging, getOrderParams( order ),
            null, true, null, null, null, null, null, false, includeDeleted );

        return eventRowService.getEventRows( params );
    }

    /**
     * Returns a single event by its UID, with an href pointing back to this resource.
     *
     * @throws WebMessageException (404) when no event exists for the UID.
     */
    @RequestMapping( value = "/{uid}", method = RequestMethod.GET )
    public @ResponseBody Event getEvent( @PathVariable( "uid" ) String uid,
        @RequestParam Map<String, String> parameters, Model model, HttpServletRequest request )
        throws Exception
    {
        Event event = eventService.getEvent( programStageInstanceService.getProgramStageInstance( uid ) );

        if ( event == null )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "Event not found for ID " + uid ) );
        }

        event.setHref( ContextUtils.getRootPath( request ) + RESOURCE_PATH + "/" + uid );

        return event;
    }

    /**
     * Streams the file-resource content stored as the value of a file-typed data
     * element on the given event. Redirects to a signed URL when the file store
     * supports it; otherwise streams the bytes directly.
     */
    @RequestMapping( value = "/files", method = RequestMethod.GET )
    public void getEventDataValueFile( @RequestParam String eventUid, @RequestParam String dataElementUid,
        @RequestParam( defaultValue = "original" ) String dimension,
        HttpServletResponse response, HttpServletRequest request )
        throws Exception
    {
        Event event = eventService.getEvent( programStageInstanceService.getProgramStageInstance( eventUid ) );

        if ( event == null )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "Event not found for ID " + eventUid ) );
        }

        DataElement dataElement = dataElementService.getDataElement( dataElementUid );

        if ( dataElement == null )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "DataElement not found for ID " + dataElementUid ) );
        }

        if ( !dataElement.isFileType() )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "DataElement must be of type file" ) );
        }

        // Get file resource: the data value's value holds the file resource UID
        String uid = null;

        for ( DataValue value : event.getDataValues() )
        {
            if ( value.getDataElement() != null && value.getDataElement().equals( dataElement.getUid() ) )
            {
                uid = value.getValue();
                break;
            }
        }

        if ( uid == null )
        {
            // NOTE(review): this branch means "no value stored for this data element",
            // but reuses the "must be of type file" message — looks copy-pasted, confirm.
            throw new WebMessageException( WebMessageUtils.conflict( "DataElement must be of type file" ) );
        }

        FileResource fileResource = fileResourceService.getFileResource( uid );

        if ( fileResource == null || fileResource.getDomain() != FileResourceDomain.DATA_VALUE )
        {
            throw new WebMessageException( WebMessageUtils.notFound(
                "A data value file resource with id " + uid + " does not exist." ) );
        }

        if ( fileResource.getStorageStatus() != FileResourceStorageStatus.STORED )
        {
            // The FileResource exists and is tied to DataValue, however the
            // underlying file content still not stored to external file store
            WebMessage webMessage = WebMessageUtils.conflict(
                "The content is being processed and is not available yet. Try again later.",
                "The content requested is in transit to the file store and will be available at a later time."
            );
            webMessage.setResponse( new FileResourceWebMessageResponse( fileResource ) );

            throw new WebMessageException( webMessage );
        }

        FileResourceUtils.setImageFileDimensions( fileResource, dimension );

        ByteSource content = fileResourceService.getFileResourceContent( fileResource );

        if ( content == null )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "The referenced file could not be found" ) );
        }

        // Attempt to build signed URL request for content and redirect
        URI signedGetUri = fileResourceService.getSignedGetFileResourceContentUri( fileResource );

        if ( signedGetUri != null )
        {
            response.setStatus( HttpServletResponse.SC_TEMPORARY_REDIRECT );
            response.setHeader( HttpHeaders.LOCATION, signedGetUri.toASCIIString() );
            return;
        }

        // Build response and return
        response.setContentType( fileResource.getContentType() );
        // NOTE(review): new Long(..) boxing is deprecated; also truncates for files > 2 GiB.
        response.setContentLength( new Long( fileResource.getContentLength() ).intValue() );
        response.setHeader( HttpHeaders.CONTENT_DISPOSITION, "filename=" + fileResource.getName() );

        // Request signing is not available, stream content back to client
        try ( InputStream inputStream = content.openStream() )
        {
            IOUtils.copy( inputStream, response.getOutputStream() );
        }
        catch ( IOException e )
        {
            throw new WebMessageException( WebMessageUtils.error( "Failed fetching the file from storage",
                "There was an exception when trying to fetch the file from the storage backend. "
                    + "Depending on the provider the root cause could be network or file system related."
) );
        }
    }

    // -------------------------------------------------------------------------
    // Create
    // -------------------------------------------------------------------------

    /**
     * Imports events posted as XML. Synchronous imports return import summaries
     * (with Location header for a single successful summary); async imports are
     * delegated to {@link #startAsyncImport}.
     */
    @RequestMapping( method = RequestMethod.POST, consumes = "application/xml" )
    public void postXmlEvent( @RequestParam( defaultValue = "CREATE_AND_UPDATE" ) ImportStrategy strategy,
        HttpServletResponse response, HttpServletRequest request, ImportOptions importOptions )
        throws Exception
    {
        importOptions.setImportStrategy( strategy );

        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );

        importOptions.setIdSchemes( getIdSchemesFromParameters( importOptions.getIdSchemes(),
            contextService.getParameterValuesMap() ) );

        if ( !importOptions.isAsync() )
        {
            ImportSummaries importSummaries = eventService.addEventsXml( inputStream, importOptions );
            importSummaries.setImportOptions( importOptions );

            // Attach hrefs only for real (non-dry-run) non-error, non-delete outcomes;
            // for sync strategy, skip summaries that only recorded deletions.
            importSummaries.getImportSummaries().stream()
                .filter( importSummary -> !importOptions.isDryRun()
                    && !importSummary.getStatus().equals( ImportStatus.ERROR )
                    && !importOptions.getImportStrategy().isDelete()
                    && (!importOptions.getImportStrategy().isSync()
                        || importSummary.getImportCount().getDeleted() == 0) )
                .forEach( importSummary -> importSummary.setHref(
                    ContextUtils.getRootPath( request ) + RESOURCE_PATH + "/" + importSummary.getReference() ) );

            if ( importSummaries.getImportSummaries().size() == 1 )
            {
                ImportSummary importSummary = importSummaries.getImportSummaries().get( 0 );
                importSummary.setImportOptions( importOptions );

                if ( !importOptions.isDryRun() )
                {
                    if ( !importSummary.getStatus().equals( ImportStatus.ERROR ) )
                    {
                        response.setHeader( "Location", ContextUtils.getRootPath( request )
                            + RESOURCE_PATH + "/" + importSummary.getReference() );
                    }
                }
            }

            webMessageService.send( WebMessageUtils.importSummaries( importSummaries ), response, request );
        }
        else
        {
            List<Event> events = eventService.getEventsXml( inputStream );
            startAsyncImport( importOptions, events, request, response );
        }
    }

    /**
     * Imports events posted as JSON. Mirrors {@link #postXmlEvent} with JSON parsing.
     */
    @RequestMapping( method = RequestMethod.POST, consumes = "application/json" )
    public void postJsonEvent( @RequestParam( defaultValue = "CREATE_AND_UPDATE" ) ImportStrategy strategy,
        HttpServletResponse response, HttpServletRequest request, ImportOptions importOptions )
        throws Exception
    {
        importOptions.setImportStrategy( strategy );

        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );

        importOptions.setIdSchemes( getIdSchemesFromParameters( importOptions.getIdSchemes(),
            contextService.getParameterValuesMap() ) );

        if ( !importOptions.isAsync() )
        {
            ImportSummaries importSummaries = eventService.addEventsJson( inputStream, importOptions );
            importSummaries.setImportOptions( importOptions );

            importSummaries.getImportSummaries().stream()
                .filter( importSummary -> !importOptions.isDryRun()
                    && !importSummary.getStatus().equals( ImportStatus.ERROR )
                    && !importOptions.getImportStrategy().isDelete()
                    && (!importOptions.getImportStrategy().isSync()
                        || importSummary.getImportCount().getDeleted() == 0) )
                .forEach( importSummary -> importSummary.setHref(
                    ContextUtils.getRootPath( request ) + RESOURCE_PATH + "/" + importSummary.getReference() ) );

            if ( importSummaries.getImportSummaries().size() == 1 )
            {
                ImportSummary importSummary = importSummaries.getImportSummaries().get( 0 );
                importSummary.setImportOptions( importOptions );

                if ( !importOptions.isDryRun() )
                {
                    if ( !importSummary.getStatus().equals( ImportStatus.ERROR ) )
                    {
                        response.setHeader( "Location", ContextUtils.getRootPath( request )
                            + RESOURCE_PATH + "/" + importSummary.getReference() );
                    }
                }
            }

            webMessageService.send( WebMessageUtils.importSummaries( importSummaries ), response, request );
        }
        else
        {
            List<Event> events = eventService.getEventsJson( inputStream );
            startAsyncImport( importOptions, events, request, response );
        }
    }

    /**
     * Adds a note to an existing event, posted as a JSON event payload.
     *
     * @throws WebMessageException (404) when the event does not exist.
     */
    @RequestMapping( value = "/{uid}/note", method = RequestMethod.POST, consumes = "application/json" )
    public void postJsonEventForNote( @PathVariable( "uid" ) String uid,
        HttpServletResponse response, HttpServletRequest request, ImportOptions importOptions )
        throws IOException, WebMessageException
    {
        if ( !programStageInstanceService.programStageInstanceExists( uid ) )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "Event not found for ID " + uid ) );
        }

        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );

        Event event = renderService.fromJson( inputStream, Event.class );
        event.setEvent( uid );

        eventService.updateEventForNote( event );
        webMessageService.send( WebMessageUtils.ok( "Event updated: " + uid ), response, request );
    }

    /**
     * Imports events posted as CSV; {@code skipFirst} skips a header row.
     */
    @RequestMapping( method = RequestMethod.POST, consumes = { "application/csv", "text/csv" } )
    public void postCsvEvents( @RequestParam( required = false, defaultValue = "false" ) boolean skipFirst,
        HttpServletResponse response, HttpServletRequest request, ImportOptions importOptions )
        throws IOException, ParseException
    {
        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );

        Events events = csvEventService.readEvents( inputStream, skipFirst );

        if ( !importOptions.isAsync() )
        {
            ImportSummaries importSummaries = eventService.addEvents( events.getEvents(), importOptions, null );
            importSummaries.setImportOptions( importOptions );
            webMessageService.send( WebMessageUtils.importSummaries( importSummaries ), response, request );
        }
        else
        {
            startAsyncImport( importOptions, events.getEvents(), request, response );
        }
    }

    // -------------------------------------------------------------------------
    // Update
    // -------------------------------------------------------------------------

    /** Full update of an event from an XML payload. */
    @RequestMapping( value = "/{uid}", method = RequestMethod.PUT, consumes = { "application/xml", "text/xml" } )
    public void putXmlEvent( HttpServletResponse response, HttpServletRequest request,
        @PathVariable( "uid" ) String uid, ImportOptions importOptions )
        throws IOException
    {
        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );
        Event updatedEvent = renderService.fromXml( inputStream, Event.class );
        updatedEvent.setEvent( uid );

        updateEvent( updatedEvent, false, importOptions, request, response );
    }

    /** Full update of an event from a JSON payload. */
    @RequestMapping( value = "/{uid}", method = RequestMethod.PUT, consumes = "application/json" )
    public void putJsonEvent( HttpServletResponse response, HttpServletRequest request,
        @PathVariable( "uid" ) String uid, ImportOptions importOptions )
        throws IOException
    {
        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );
        Event updatedEvent = renderService.fromJson( inputStream, Event.class );
        updatedEvent.setEvent( uid );

        updateEvent( updatedEvent, false, importOptions, request, response );
    }

    /**
     * Shared update path for the PUT endpoints; sends the resulting import
     * summary back on the response.
     *
     * @param singleValue whether only a single data value is being updated.
     */
    private void updateEvent( Event updatedEvent, boolean singleValue, ImportOptions importOptions,
        HttpServletRequest request, HttpServletResponse response )
    {
        ImportSummary importSummary = eventService.updateEvent( updatedEvent, singleValue, importOptions, false );
        importSummary.setImportOptions( importOptions );
        webMessageService.send( WebMessageUtils.importSummary( importSummary ), response, request );
    }

    /**
     * Updates a single data value of an event from a JSON payload.
     */
    @RequestMapping( value = "/{uid}/{dataElementUid}", method = RequestMethod.PUT, consumes = "application/json" )
    public void putJsonEventSingleValue( HttpServletResponse response, HttpServletRequest request,
        @PathVariable( "uid" ) String uid, @PathVariable( "dataElementUid" ) String dataElementUid )
        throws IOException, WebMessageException
    {
        DataElement dataElement = dataElementService.getDataElement( dataElementUid );

        if ( dataElement == null )
        {
            // NOTE(review): no return after sending the not-found message — execution
            // falls through and still performs the update; looks like a missing return.
            WebMessage webMsg = WebMessageUtils.notFound( "DataElement not found for ID " + dataElementUid );
            webMessageService.send( webMsg, response, request );
        }

        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );
        Event updatedEvent = renderService.fromJson( inputStream, Event.class );
        updatedEvent.setEvent( uid );

        updateEvent( updatedEvent, true, null, request, response );
    }

    /**
     * Updates only the event date of an existing event.
     *
     * @throws WebMessageException (404) when the event does not exist.
     */
    @RequestMapping( value = "/{uid}/eventDate", method = RequestMethod.PUT, consumes = "application/json" )
    public void putJsonEventForEventDate( HttpServletResponse response, HttpServletRequest request,
        @PathVariable( "uid" ) String uid, ImportOptions importOptions )
        throws IOException, WebMessageException
    {
        if ( !programStageInstanceService.programStageInstanceExists( uid ) )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "Event not found for ID " + uid ) );
        }

        InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat( request.getInputStream() );
        Event updatedEvent = renderService.fromJson( inputStream, Event.class );
        updatedEvent.setEvent( uid );

        eventService.updateEventForEventDate( updatedEvent );
        webMessageService.send( WebMessageUtils.ok( "Event updated " + uid ), response, request );
    }

    // -------------------------------------------------------------------------
    // Delete
    // -------------------------------------------------------------------------

    /** Deletes an event by UID and sends the import summary back. */
    @RequestMapping( value = "/{uid}", method = RequestMethod.DELETE )
    public void deleteEvent( HttpServletResponse response, HttpServletRequest request,
        @PathVariable( "uid" ) String uid )
    {
        ImportSummary importSummary = eventService.deleteEvent( uid );
        webMessageService.send( WebMessageUtils.importSummary( importSummary ), response, request );
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    /**
     * Extracts the order directives that refer to data elements from the raw
     * {@code order} parameter (semicolon-separated {@code uid:direction} items).
     *
     * @return map of data element UID to direction ("asc"/"desc"); "asc" is the default.
     */
    private Map<String, String> getDataElementsFromOrder( String allOrders )
    {
        Map<String, String> dataElements = new HashMap<>();

        if ( allOrders != null )
        {
            for ( String order : TextUtils.splitToArray( allOrders, TextUtils.SEMICOLON ) )
            {
                String[] orderParts = order.split( ":" );

                DataElement de = dataElementService.getDataElement( orderParts[0] );

                if ( de != null )
                {
                    String direction = "asc";

                    // NOTE(review): locale-sensitive toLowerCase(); equalsIgnoreCase or
                    // Locale.ROOT would be safer — confirm against project conventions.
                    if ( orderParts.length == 2 && orderParts[1].toLowerCase().equals( "desc" ) )
                    {
                        direction = "desc";
                    }

                    dataElements.put( de.getUid(), direction );
                }
            }
        }

        return dataElements;
    }

    /**
     * Starts an asynchronous import task.
     *
     * @param importOptions the ImportOptions.
     * @param events the events to import.
     * @param request the HttpRequest.
     * @param response the HttpResponse.
     */
    private void startAsyncImport( ImportOptions importOptions, List<Event> events,
        HttpServletRequest request, HttpServletResponse response )
    {
        JobConfiguration jobId = new JobConfiguration( "inMemoryEventImport", EVENT_IMPORT,
            currentUserService.getCurrentUser().getUid(), true );
        schedulingManager.executeJob( new ImportEventsTask( events, eventService, importOptions, jobId ) );

        response.setHeader( "Location", ContextUtils.getRootPath( request ) + "/system/tasks/" + EVENT_IMPORT );
        webMessageService.send( jobConfigurationReport( jobId ), response, request );
    }

    /** Returns true if any requested field contains {@code match}, or is "*" or a ":preset". */
    private boolean fieldsContains( String match, List<String> fields )
    {
        for ( String field : fields )
        {
            // For now assume href/access if * or preset is requested
            if ( field.contains( match ) || field.equals( "*" ) || field.startsWith( ":" ) )
            {
                return true;
            }
        }

        return false;
    }

    /** Whether the requested fields imply that href links should be populated. */
    protected boolean hasHref( List<String> fields )
    {
        return fieldsContains( "href", fields );
    }

    /**
     * Parses comma-separated schema order directives; returns null when absent.
     */
    private List<Order> getOrderParams( String order )
    {
        // NOTE(review): the null check is redundant — StringUtils.isEmpty already covers null.
        if ( order != null && !StringUtils.isEmpty( order ) )
        {
            OrderParams op = new OrderParams( Sets.newLinkedHashSet( Arrays.asList( order.split( "," ) ) ) );
            return op.getOrders( getSchema() );
        }

        return null;
    }

    /** Splits a comma-separated order string; returns null when absent. */
    private List<String> getGridOrderParams( String order )
    {
        if ( order != null && !StringUtils.isEmpty( order ) )
        {
            return Arrays.asList( order.split( "," ) );
        }

        return null;
    }

    /**
     * Filters the semicolon-separated {@code order} string down to the items
     * that refer to known data elements, normalized to {@code uid:direction}.
     */
    private List<String> getGridOrderParams( String order, Map<String, String> dataElementOrders )
    {
        List<String> dataElementOrderList = new ArrayList<String>();

        if ( !StringUtils.isEmpty( order ) && dataElementOrders != null && dataElementOrders.size() > 0 )
        {
            List<String> orders = Arrays.asList( order.split( ";" ) );

            for ( String orderItem : orders )
            {
                String dataElementCandidate = orderItem.split( ":" )[0];

                if ( dataElementOrders.keySet().contains( dataElementCandidate ) )
                {
                    dataElementOrderList.add( dataElementCandidate + ":"
                        + dataElementOrders.get( dataElementCandidate ) );
                }
            }
        }

        return dataElementOrderList;
    }

    /**
     * Builds the metadata map (data element UID to display name) attached to
     * event query responses.
     */
    private Map<Object, Object> getMetaData( Program program )
    {
        Map<Object, Object> metaData = new HashMap<>();

        if ( program != null )
        {
            Map<String, String> dataElements = new HashMap<>();

            for ( DataElement de : program.getDataElements() )
            {
                dataElements.put( de.getUid(), de.getDisplayName() );
            }

            metaData.put( META_DATA_KEY_DE, dataElements );
        }

        return metaData;
    }

    /**
     * Overrides id-scheme settings with values from the request parameters,
     * when present.
     */
    private IdSchemes getIdSchemesFromParameters( IdSchemes idSchemes, Map<String, List<String>> params )
    {
        String idScheme = getParamValue( params, "idScheme" );

        if ( idScheme != null )
        {
            idSchemes.setIdScheme( idScheme );
        }

        String programStageInstanceIdScheme = getParamValue( params, "programStageInstanceIdScheme" );

        if ( programStageInstanceIdScheme != null )
        {
            idSchemes.setProgramStageInstanceIdScheme( programStageInstanceIdScheme );
        }

        return idSchemes;
    }

    /** Returns the first value for the given parameter key, or null when absent. */
    private String getParamValue( Map<String, List<String>> params, String key )
    {
        return params.get( key ) != null ? params.get( key ).get( 0 ) : null;
    }
}
package rinde.opt.localsearch;

import static com.google.common.collect.Lists.newArrayList;
import static java.util.Collections.reverseOrder;
import static java.util.Collections.sort;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static rinde.opt.localsearch.InsertionsTest.list;
import static rinde.opt.localsearch.Swaps.inListSwap;
import static rinde.opt.localsearch.Swaps.removeAll;
import static rinde.opt.localsearch.Swaps.replace;
import static rinde.opt.localsearch.Swaps.swap;

import java.util.Iterator;
import java.util.List;

import org.junit.Before;
import org.junit.Test;

import rinde.opt.localsearch.Swaps.Swap;

import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;

/**
 * Test of {@link Swaps}.
 * @author Rinde van Lon <rinde.vanlon@cs.kuleuven.be>
 */
public class SwapsTest {

  static final String A = "A";
  static final String B = "B";
  static final String C = "C";
  static final String D = "D";
  static final String E = "E";
  static final String F = "F";
  static final String G = "G";

  // Shared fixture, initialized in setUp().
  @SuppressWarnings("null")
  Schedule<SortDirection, String> schedule;

  enum SortDirection {
    ASCENDING, DESCENDING;
  }

  /**
   * Creates a schedule for testing.
   */
  @SuppressWarnings("unchecked")
  @Before
  public void setUp() {
    schedule = Schedule.create(SortDirection.ASCENDING,
        list(list(G, D, D, G), list(A, C, B, F, E, F, A, B)), list(0, 0),
        new StringListEvaluator());
  }

  /**
   * Tests whether swap iterator produces correct swaps.
   */
  @Test
  public void generateTest() {
    final Schedule<SortDirection, String> s = Schedule.create(
        SortDirection.DESCENDING, list(list(A, A, B, E), list(C, D)),
        list(0, 0), new StringListEvaluator());

    final Iterator<Swap<String>> it = Swaps.swapIterator(s);
    // Only checks that every generated swap can be applied without error;
    // the resulting routes themselves are not asserted here.
    while (it.hasNext()) {
      final Swap<String> swapOperation = it.next();
      final ImmutableList<ImmutableList<String>> routes = Swaps.swap(s,
          swapOperation, 100).get().routes;
    }
  }

  /**
   * Evaluator providing an objective function for sorting strings in ascending
   * or descending order.
   * @author Rinde van Lon <rinde.vanlon@cs.kuleuven.be>
   */
  static class StringListEvaluator implements
      RouteEvaluator<SortDirection, String> {
    @Override
    public double computeCost(SortDirection context, int routeIndex,
        ImmutableList<String> newRoute) {
      // Cost = total displacement of each element from its sorted position.
      final List<String> expected = newArrayList(newRoute);
      if (context == SortDirection.DESCENDING) {
        sort(expected, reverseOrder());
      } else {
        sort(expected);
      }
      double error = 0;
      for (int i = 0; i < expected.size(); i++) {
        final int foundAtIndex = newRoute.indexOf(expected.get(i));
        error += Math.abs(i - foundAtIndex);
      }
      return error;
    }

    @Override
    public String toString() {
      return Objects.toStringHelper(this).toString();
    }
  }

  /**
   * Tests for swapping of one item at a time.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void singleSwapTest() {
    final Schedule<SortDirection, String> s = Schedule.create(
        SortDirection.ASCENDING, list(list(A, C, B), list(D)), list(0, 0),
        new StringListEvaluator());

    // No-effect swaps (same position) must be rejected.
    assertFalse(swap(s, new Swap<String>(B, 0, 0, list(0)), 0d).isPresent());
    assertFalse(swap(s, new Swap<String>(B, 0, 1, list(1)), 0d).isPresent());

    final Schedule<SortDirection, String> swap1 = swap(s,
        new Swap<String>(B, 0, 1, list(0)), 0d).get();
    assertEquals(s.context, swap1.context);
    assertEquals(s.evaluator, swap1.evaluator);
    assertEquals(list(list(A, C), list(B, D)), swap1.routes);
    assertEquals(list(0d, 0d), swap1.objectiveValues);
    assertEquals(0, swap1.objectiveValue, 0.00001);

    final Schedule<SortDirection, String> swap2 = swap(s,
        new Swap<String>(B, 0, 0, list(1)), 0d).get();
    assertEquals(s.context, swap2.context);
    assertEquals(s.evaluator, swap2.evaluator);
    assertEquals(list(list(A, B, C), list(D)), swap2.routes);
    assertEquals(list(0d, 0d), swap2.objectiveValues);
    assertEquals(0, swap2.objectiveValue, 0.00001);
  }

  /**
   * Swap two (the same) items at once.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void doubleSwapTest() {
    final Schedule<SortDirection, String> s = Schedule.create(
        SortDirection.ASCENDING,
        list(list(G, D, D, G), list(A, C, B, F, E, F, A, B)), list(0, 0),
        new StringListEvaluator());
    assertEquals(s.objectiveValues.size(), s.routes.size());
    assertFalse(s.toString().isEmpty());

    final Schedule<SortDirection, String> swap1 = swap(s,
        new Swap<String>(A, 1, 0, list(1, 3)), 10).get();
    assertEquals(s.context, swap1.context);
    assertEquals(s.evaluator, swap1.evaluator);
    assertEquals(list(list(G, A, D, D, A, G), list(C, B, F, E, F, B)),
        swap1.routes);
    assertEquals(list(11d, 8d), swap1.objectiveValues);
    assertEquals(s.objectiveValue, swap1.objectiveValue, 0.00001);

    // within list
    assertEquals(list(list(G, D, D, G), list(A, A, C, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 1, list(0, 0)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(A, C, A, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 1, list(0, 1)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(C, A, A, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 1, list(1, 1)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(C, A, B, A, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 1, list(1, 2)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(C, A, B, F, A, E, F, B)),
        swap(s, new Swap<String>(A, 1, 1, list(1, 3)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(C, A, B, F, E, A, F, B)),
        swap(s, new Swap<String>(A, 1, 1, list(1, 4)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(C, A, B, F, E, F, A, B)),
        swap(s, new Swap<String>(A, 1, 1, list(1, 5)), 10).get().routes);
    assertEquals(list(list(G, D, D, G), list(C, A, B, F, E, F, B, A)),
        swap(s, new Swap<String>(A, 1, 1, list(1, 6)), 10).get().routes);

    // to other list
    assertEquals(list(list(A, A, G, D, D, G), list(C, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 0, list(0, 0)), 10).get().routes);
    assertEquals(list(list(A, G, A, D, D, G), list(C, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 0, list(0, 1)), 10).get().routes);
    assertEquals(list(list(A, G, D, A, D, G), list(C, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 0, list(0, 2)), 10).get().routes);
    assertEquals(list(list(A, G, D, D, A, G), list(C, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 0, list(0, 3)), 10).get().routes);
    assertEquals(list(list(A, G, D, D, G, A), list(C, B, F, E, F, B)),
        swap(s, new Swap<String>(A, 1, 0, list(0, 4)), 10).get().routes);
  }

  /**
   * fromRow can not be negative.
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapNegativeFromRow() {
    swap(schedule, new Swap<String>(A, -1, 1, list(1)), 0d);
  }

  /**
   * fromRow can not be too large.
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapToLargeFromRow() {
    swap(schedule, new Swap<String>(A, 2, 1, list(1)), 0d);
  }

  /**
   * toRow can not be negative.
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapNegativeToRow() {
    swap(schedule, new Swap<String>(A, 1, -1, list(1)), 0d);
  }

  /**
   * toRow can not be too large.
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapTooLargeToRow() {
    swap(schedule, new Swap<String>(A, 1, 2, list(1)), 0d);
  }

  /**
   * A is not in row 0, hence it cannot be swapped.
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapWrongRow() {
    swap(schedule, new Swap<String>(A, 0, 1, list(1)), 0d);
  }

  /**
   * There are two occurrences of A in row 1, therefore there should be 2
   * indices.
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapIncorrectIndicesSize() {
    swap(schedule, new Swap<String>(A, 1, 0, list(1)), 0d);
  }

  /**
   * Cannot move A to index -1 (does not exist).
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapToNegativeIndices() {
    swap(schedule, new Swap<String>(A, 1, 0, list(1, -1)), 0d);
  }

  /**
   * Cannot move A to index 8 (does not exist).
   */
  @Test(expected = IllegalArgumentException.class)
  public void swapToTooLargeIndices() {
    swap(schedule, new Swap<String>(A, 1, 0, list(1, 8)), 0d);
  }

  /**
   * Several tests for the inListSwap method.
   */
  @Test
  public void inListSwapTest() {
    assertEquals(InsertionsTest.list(A, C, B),
        inListSwap(InsertionsTest.list(A, B, C), list(2), B));
    assertEquals(
        InsertionsTest.list(D, A, B, C, D, D),
        inListSwap(InsertionsTest.list(A, B, C, D, D, D),
            InsertionsTest.list(0, 3, 3), D));

    // A no-effect swap must be rejected with an IllegalArgumentException.
    boolean fail = false;
    try {
      inListSwap(InsertionsTest.list(A, B, C), InsertionsTest.list(0), A);
    } catch (final IllegalArgumentException e) {
      fail = true;
    }
    assertTrue(fail);

    assertEquals(InsertionsTest.list(B, A, C),
        inListSwap(InsertionsTest.list(A, B, C), InsertionsTest.list(1), A));
    assertEquals(InsertionsTest.list(B, C, A),
        inListSwap(InsertionsTest.list(A, B, C), InsertionsTest.list(2), A));
  }

  /**
   * Test for empty list.
   */
  @Test(expected = IllegalArgumentException.class)
  public void insListSwapEmptyList() {
    inListSwap(InsertionsTest.list(), InsertionsTest.list(1), A);
  }

  /**
   * May not swap such that the result equals the input (a no effect swap).
   */
  @Test(expected = IllegalArgumentException.class)
  public void inListSwapSameLocation() {
    inListSwap(InsertionsTest.list(A, B, C), InsertionsTest.list(1), B);
  }

  /**
   * Number of occurrences of B (1) should equal number of insertions (2).
   */
  @Test(expected = IllegalArgumentException.class)
  public void inListSwapNonSymmetric1() {
    inListSwap(InsertionsTest.list(A, B, C), InsertionsTest.list(1, 2), B);
  }

  /**
   * Number of occurrences of B (2) should equal number of insertions (1).
   */
  @Test(expected = IllegalArgumentException.class)
  public void inListSwapNonSymmetric2() {
    inListSwap(InsertionsTest.list(A, B, C, B), InsertionsTest.list(2), B);
  }

  /**
   * Tests the remove all method.
   */
  @Test
  public void removeAllTest() {
    final List<String> list = newArrayList(A, B, C, A, B, C, D);
    assertEquals(7, list.size());
    // removeAll returns the indices of the removed occurrences and mutates list.
    assertEquals(InsertionsTest.list(2, 5), removeAll(list, C));
    assertEquals(5, list.size());
    assertEquals(InsertionsTest.list(), removeAll(list, C));
    assertEquals(5, list.size());
    assertEquals(InsertionsTest.list(1, 3), removeAll(list, B));
    assertEquals(3, list.size());
    assertEquals(InsertionsTest.list(2), removeAll(list, D));
    assertEquals(2, list.size());
    assertEquals(InsertionsTest.list(0, 1), removeAll(list, A));
    assertTrue(list.isEmpty());

    assertEquals(InsertionsTest.list(), removeAll(newArrayList(), A));
  }

  /**
   * Test replace with valid inputs.
   */
  @Test
  public void replaceTest() {
    assertEquals(list(4, 5, 6), replace(list(1, 2, 3), list(0, 1, 2),
        list(4, 5, 6)));
    assertEquals(list(), replace(list(), ImmutableList.<Integer> of(), list()));
  }

  /**
   * Number of indices must equal number of elements.
   */
  @Test(expected = IllegalArgumentException.class)
  public void replaceInvalidTest() {
    replace(list(1), list(0, 0), list(2));
  }
}
package technology.tabula; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.awt.geom.Point2D; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.Assert; import org.junit.Test; public class TestRectangle { @Test public void testCompareEqualsRectangles() { Rectangle first = new Rectangle(); Rectangle second = new Rectangle(); assertTrue(first.equals(second)); assertTrue(second.equals(first)); } @Test public void testCompareAlignedHorizontalRectangle() { Rectangle lower = new Rectangle(0f, 10f, 10f, 10f); Rectangle upper = new Rectangle(0f,20f, 10f, 10f); assertTrue(lower.compareTo(upper) < 0); } @Test public void testCompareAlignedVerticalRectangle() { Rectangle lower = new Rectangle(10f, 0f, 10f, 10f); Rectangle upper = new Rectangle(20f,0f, 10f, 10f); assertTrue(lower.compareTo(upper) < 0); } @Test public void testCompareVerticalOverlapRectangle() { Rectangle lower = new Rectangle(5f, 0f, 10f, 10f); Rectangle upper = new Rectangle(0f, 10f, 10f, 10f); assertTrue(lower.compareTo(upper) < 0); } @Test public void testCompareVerticalOverlapLessThresholdRectangle() { Rectangle lower = new Rectangle(0f, 10f, 10f, 10f); Rectangle upper = new Rectangle(9.8f, 0f, 10f, 10f); assertTrue(lower.compareTo(upper) < 0); } @Test public void testQuickSortOneUpperThanOther() { Rectangle lower = new Rectangle(175.72f, 72.72f, 1.67f, 1.52f); //, (Comma after AARON) Rectangle upper = new Rectangle(169.21f, 161.16f, 4.33f, 4.31f); // R (REGIONAL PULMONARY) assertTrue(lower.compareTo(upper) > 0); } @Test public void testQuickSortRectangleList() { //Testing wrong sorting // Expected: AARON, JOSHUA, N // but was: AARON JOSHUA N , , Rectangle first = new Rectangle(172.92999267578125f, 51.47999954223633f, 4.0f, 4.309999942779541f); Rectangle second = new Rectangle(175.72000122070312f, 72.72000122070312f, 1.6699999570846558f, 1.5199999809265137f); Rectangle third = new 
Rectangle(172.92999267578125f, 96.36000061035156f, 4.0f, 4.309999942779541f); Rectangle fourth = new Rectangle(175.72000122070312f, 100.31999969482422f, 1.6699999570846558f, 1.5199999809265137f); Rectangle fifth = new Rectangle(172.92999267578125f, 103.68000030517578f, 4.329999923706055f, 4.309999942779541f); Rectangle sixth = new Rectangle(169.2100067138672f, 161.16000366210938f, 4.329999923706055f, 4.309999942779541f); List<Rectangle> expectedList = new ArrayList<Rectangle>(); expectedList.add(first); expectedList.add(sixth); expectedList.add(second); expectedList.add(third); expectedList.add(fourth); expectedList.add(fifth); List<Rectangle> toSortList = new ArrayList<Rectangle>(); toSortList.add(sixth); toSortList.add(second); toSortList.add(third); toSortList.add(fifth); toSortList.add(first); toSortList.add(fourth); Collections.sort(toSortList); assertEquals(expectedList, toSortList); } @Test public void testGetVerticalOverlapShouldReturnZero() { Rectangle lower = new Rectangle(10f, 0f, 10f, 10f); Rectangle upper = new Rectangle(20f,0f, 10f, 10f); float overlap = lower.verticalOverlap(upper); assertEquals(0f, overlap, 0); assertTrue(!lower.verticallyOverlaps(upper)); assertEquals(0f, lower.verticalOverlapRatio(upper), 0); assertEquals(0f, lower.overlapRatio(upper), 0); } @Test public void testGetVerticalOverlapShouldReturnMoreThanZero() { Rectangle lower = new Rectangle(15f, 10f, 10f, 10f); Rectangle upper = new Rectangle(20f, 0f, 10f, 10f); float overlap = lower.verticalOverlap(upper); assertEquals(5f, overlap, 0); assertTrue(lower.verticallyOverlaps(upper)); assertEquals(0.5f, lower.verticalOverlapRatio(upper), 0); assertEquals(0f, lower.overlapRatio(upper), 0); } @Test public void testGetHorizontalOverlapShouldReturnZero() { Rectangle one = new Rectangle(0f, 0f, 10f, 10f); Rectangle two = new Rectangle(10f, 10f, 10f, 10f); assertTrue(!one.horizontallyOverlaps(two)); assertEquals(0f, one.overlapRatio(two), 0); } @Test public void 
testGetHorizontalOverlapShouldReturnMoreThanZero() { Rectangle one = new Rectangle(0f, 0f, 10f, 10f); Rectangle two = new Rectangle(10f, 5f, 10f, 10f); assertTrue(one.horizontallyOverlaps(two)); assertEquals(5f, one.horizontalOverlap(two), 0); assertEquals(0f, one.overlapRatio(two), 0); } @Test public void testGetOverlapShouldReturnMoreThanZero() { Rectangle one = new Rectangle(0f, 0f, 10f, 10f); Rectangle two = new Rectangle(5f, 5f, 10f, 10f); assertTrue(one.horizontallyOverlaps(two)); assertTrue(one.verticallyOverlaps(two)); assertEquals(5f, one.horizontalOverlap(two), 0); assertEquals(5f, one.verticalOverlap(two), 0); assertEquals((25f/175), one.overlapRatio(two), 0); } @Test public void testMergeNoOverlappingRectangles() { Rectangle one = new Rectangle(0f, 0f, 10f, 10f); Rectangle two = new Rectangle(0f, 10f, 10f, 10f); one.merge(two); assertEquals(20f, one.getWidth(), 0); assertEquals(10f, one.getHeight(), 0); assertEquals(0f, one.getLeft(), 0); assertEquals(0f, one.getTop(), 0); assertEquals(10f, one.getBottom(), 0); assertEquals(20f * 10f, one.getArea(), 0); } @Test public void testMergeOverlappingRectangles() { Rectangle one = new Rectangle(0f, 0f, 10f, 10f); Rectangle two = new Rectangle(5f, 5f, 10f, 10f); one.merge(two); assertEquals(15f, one.getWidth(), 0); assertEquals(15f, one.getHeight(), 0); assertEquals(0f, one.getLeft(), 0); assertEquals(0f, one.getTop(), 0); } @Test public void testRectangleGetPoints() { Rectangle one = new Rectangle(10f, 20f, 30f, 40f); Point2D[] points = one.getPoints(); Point2D[] expectedPoints = new Point2D[]{ new Point2D.Float(20f, 10f), new Point2D.Float(50f, 10f), new Point2D.Float(50f, 50f), new Point2D.Float(20f, 50f) }; Assert.assertArrayEquals(expectedPoints, points); } @Test public void testGetBoundingBox() { List<Rectangle> rectangles = new ArrayList<Rectangle>(); rectangles.add(new Rectangle(0f, 0f, 10f, 10f)); rectangles.add(new Rectangle(20f, 30f, 10f, 10f)); Rectangle boundingBoxOf = 
Rectangle.boundingBoxOf(rectangles); assertEquals(new Rectangle(0f, 0f, 40f, 30f), boundingBoxOf); } @Test public void testTransitiveComparison1() { Rectangle a = new Rectangle(0,0,2,2); Rectangle b = new Rectangle(1,1,2,2); Rectangle c = new Rectangle(2,2,2,2); assertTrue(a.compareTo(b) < 0); assertTrue(b.compareTo(c) < 0); assertTrue(a.compareTo(c) < 0); } @Test public void testTransitiveComparison2() { Rectangle a = new Rectangle(2,0,2,2); Rectangle b = new Rectangle(1,1,2,2); Rectangle c = new Rectangle(0,2,2,2); assertTrue(a.compareTo(b) < 0); assertTrue(b.compareTo(c) < 0); assertTrue(a.compareTo(c) < 0); } }
package gov.va.escreening.controller.dashboard; import gov.va.escreening.form.CreateVeteranFormBean; import gov.va.escreening.service.VeteranService; import java.text.ParseException; import java.text.SimpleDateFormat; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import javax.validation.Valid; import org.apache.commons.lang3.StringUtils; import org.hibernate.exception.ConstraintViolationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @Controller @RequestMapping(value = "/dashboard") public class CreateVeteranController { private static final Logger logger = LoggerFactory.getLogger(CreateVeteranController.class); private VeteranService veteranService; @Autowired public void setVeteranService(VeteranService veteranService) { this.veteranService = veteranService; } /** * Returns the backing bean for the form. * * @return */ @ModelAttribute public CreateVeteranFormBean getCreateVeteranFormBean() { logger.debug("Creating new CreateVeteranFormBean"); return new CreateVeteranFormBean(); } /** * Initialize and setup page. 
* * @param createVeteranFormBean * @param model * @return */ @RequestMapping(value = "/createVeteran", method = RequestMethod.GET) public String setUpPageCreateVeteran(HttpServletRequest request, @ModelAttribute CreateVeteranFormBean createVeteranFormBean, Model model) { accomodateCreateVeteranFormBeamFromSearchResult(request, createVeteranFormBean, model); return "dashboard/createVeteran"; } private void accomodateCreateVeteranFormBeamFromSearchResult(HttpServletRequest request, CreateVeteranFormBean createVeteranFormBean, Model model) { HttpSession session = request.getSession(); String lastName = (String) session.getAttribute("lastName"); String ssnLastFour = (String) session.getAttribute("ssnLastFour"); boolean updateCreateVeteranFormBean = false; if (lastName != null) { createVeteranFormBean.setLastName(lastName); session.removeAttribute("lastName"); updateCreateVeteranFormBean = true; } if (ssnLastFour != null) { createVeteranFormBean.setSsnLastFour(ssnLastFour); session.removeAttribute("ssnLastFour"); updateCreateVeteranFormBean = true; } if (updateCreateVeteranFormBean) { model.addAttribute("createVeteranFormBean", createVeteranFormBean); } } /** * Saves the form into the database. * * @param createVeteranFormBean * @param result * @param model * @return */ @RequestMapping(value = "/createVeteran", method = RequestMethod.POST, params = "saveButton") public String processCreateVeteran(@Valid @ModelAttribute CreateVeteranFormBean createVeteranFormBean, BindingResult result, Model model) { // Need to validate birthDate. 
if (StringUtils.isNotBlank(createVeteranFormBean.getBirthDateString())) { SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy"); try { createVeteranFormBean.setBirthDate(sdf.parse(createVeteranFormBean.getBirthDateString())); } catch (ParseException pe) { result.rejectValue("birthDateString", "birthDateString", "A valid Date of Birth is required."); logger.error("Failed to parse birthDateString", pe); } } // If there is an error, return the same view. if (result.hasErrors()) { return "dashboard/createVeteran"; } // Save to database, get veteranId, and then redirect to next page. try { Integer veteranId = veteranService.add(createVeteranFormBean); return "redirect:/dashboard/veteranDetail?vid=" + veteranId; } catch (DataIntegrityViolationException dve) { if (dve.getCause() instanceof ConstraintViolationException) { logger.error("Veteran being created already exists", dve); result.rejectValue(null, null, "Veteran already exists."); } else { throw dve; } } // If we get here there is an error, return the same view. return "dashboard/createVeteran"; } /** * User clicked on the cancel button. Redirect to select veteran page. * * @param model * @return */ @RequestMapping(value = "/createVeteran", method = RequestMethod.POST, params = "cancelButton") public String cancelCreateVeteran(Model model) { logger.debug("In cancelCreateVeteran"); return "redirect:/dashboard/selectVeteran"; } }
package us.bpsm.edn.protocols.c3;

import static org.junit.Assert.*;

import java.util.Arrays;

import org.junit.Test;

/**
 * Tests for C3 method-resolution-order linearization over interface
 * hierarchies. Each nested interface group (X1..X4) is a fixture whose
 * {@code extends} declaration order is semantically significant to C3.
 * NOTE(review): these hierarchies appear to mirror the classic C3/Python-2.3
 * MRO examples — confirm against the original paper if editing.
 */
public class C3Test {

    @Test
    public void testMroExample1() {
        assertEquals(
            Arrays.asList(
                X1.A.class, X1.B.class, X1.C.class, X1.D.class,
                X1.E.class, X1.F.class, X1.O.class),
            C3.methodResolutionOrder(X1.A.class));
    }

    interface X1 {
        interface O {}
        interface F extends O {}
        interface E extends O {}
        interface D extends O {}
        interface C extends D, F {}
        interface B extends D, E {}
        interface A extends B, C {}
    }

    @Test
    public void testMroExample2() {
        // Same shape as example 1, but B's parent order flips the result.
        assertEquals(
            Arrays.asList(
                X2.A.class, X2.B.class, X2.E.class, X2.C.class,
                X2.D.class, X2.F.class, X2.O.class),
            C3.methodResolutionOrder(X2.A.class));
    }

    interface X2 {
        interface O {}
        interface F extends O {}
        interface E extends O {}
        interface D extends O {}
        interface C extends D, F {}
        // X2 has B extend "E, D", while X1 extends "D, E"
        interface B extends E, D {}
        interface A extends B, C {}
    }

    @Test
    public void testMroExample3() {
        // Leaves linearize trivially to themselves plus O.
        assertEquals(
            Arrays.<Class<?>>asList(X3.A.class, X3.O.class),
            C3.methodResolutionOrder(X3.A.class));
        assertEquals(
            Arrays.<Class<?>>asList(X3.B.class, X3.O.class),
            C3.methodResolutionOrder(X3.B.class));
        assertEquals(
            Arrays.<Class<?>>asList(X3.C.class, X3.O.class),
            C3.methodResolutionOrder(X3.C.class));
        assertEquals(
            Arrays.<Class<?>>asList(X3.D.class, X3.O.class),
            C3.methodResolutionOrder(X3.D.class));
        assertEquals(
            Arrays.<Class<?>>asList(X3.E.class, X3.O.class),
            C3.methodResolutionOrder(X3.E.class));
        // Mid-level interfaces preserve their declared parent order.
        assertEquals(
            Arrays.<Class<?>>asList(X3.K1.class, X3.A.class, X3.B.class, X3.C.class, X3.O.class),
            C3.methodResolutionOrder(X3.K1.class));
        assertEquals(
            Arrays.<Class<?>>asList(X3.K2.class, X3.D.class, X3.B.class, X3.E.class, X3.O.class),
            C3.methodResolutionOrder(X3.K2.class));
        assertEquals(
            Arrays.<Class<?>>asList(X3.K3.class, X3.D.class, X3.A.class, X3.O.class),
            C3.methodResolutionOrder(X3.K3.class));
        // The diamond-heavy root Z merges all three K linearizations.
        assertEquals(
            Arrays.asList(X3.Z.class, X3.K1.class, X3.K2.class, X3.K3.class,
                X3.D.class, X3.A.class, X3.B.class, X3.C.class, X3.E.class, X3.O.class),
            C3.methodResolutionOrder(X3.Z.class));
    }

    interface X3 {
        interface O {}
        interface A extends O {}
        interface B extends O {}
        interface C extends O {}
        interface D extends O {}
        interface E extends O {}
        interface K1 extends A,B,C {}
        interface K2 extends D, B, E {}
        interface K3 extends D, A {}
        interface Z extends K1, K2, K3 {}
    }

    @Test(expected=RuntimeException.class)
    public void testMroExample4OrderDisagreement() {
        // A orders X before Y; B orders Y before X: no consistent
        // linearization exists, so C3 must fail.
        C3.methodResolutionOrder(X4.Z.class);
    }

    /** order disagreement */
    interface X4 {
        interface O {}
        interface X extends O {}
        interface Y extends O {}
        interface A extends X, Y {}
        interface B extends Y, X {}
        interface Z extends A, B {}
    }
}
package uk.org.cinquin.mutinack; import static contrib.uk.org.lidalia.slf4jext.Level.TRACE; import static uk.org.cinquin.mutinack.MutationType.SUBSTITUTION; import static uk.org.cinquin.mutinack.candidate_sequences.PositionAssay.AT_LEAST_ONE_DISAG; import static uk.org.cinquin.mutinack.candidate_sequences.PositionAssay.DISAG_THAT_MISSED_Q2; import static uk.org.cinquin.mutinack.candidate_sequences.PositionAssay.MIN_DUPLEXES_SISTER_SAMPLE; import static uk.org.cinquin.mutinack.candidate_sequences.PositionAssay.TOO_HIGH_COVERAGE; import static uk.org.cinquin.mutinack.misc_util.DebugLogControl.ENABLE_TRACE; import static uk.org.cinquin.mutinack.misc_util.DebugLogControl.NONTRIVIAL_ASSERTIONS; import static uk.org.cinquin.mutinack.misc_util.Util.mediumLengthFloatFormatter; import static uk.org.cinquin.mutinack.qualities.Quality.GOOD; import static uk.org.cinquin.mutinack.qualities.Quality.POOR; import java.io.IOException; import java.io.OutputStreamWriter; import java.text.NumberFormat; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Phaser; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; import org.eclipse.collections.api.RichIterable; import org.eclipse.collections.api.list.ListIterable; import org.eclipse.collections.api.list.MutableList; import org.eclipse.collections.api.list.primitive.MutableFloatList; import org.eclipse.collections.api.map.MutableMap; import org.eclipse.collections.api.set.MutableSet; import org.eclipse.collections.impl.block.factory.Procedures; import org.eclipse.collections.impl.factory.Lists; import org.eclipse.collections.impl.factory.Sets; import 
org.eclipse.collections.impl.list.mutable.FastList;
import org.eclipse.collections.impl.map.mutable.UnifiedMap;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;

import contrib.net.sf.samtools.SAMRecord;
import contrib.uk.org.lidalia.slf4jext.Level;
import contrib.uk.org.lidalia.slf4jext.Logger;
import contrib.uk.org.lidalia.slf4jext.LoggerFactory;
import uk.org.cinquin.mutinack.candidate_sequences.CandidateSequence;
import uk.org.cinquin.mutinack.candidate_sequences.PositionAssay;
import uk.org.cinquin.mutinack.features.BedReader;
import uk.org.cinquin.mutinack.features.GenomeFeatureTester;
import uk.org.cinquin.mutinack.misc_util.Assert;
import uk.org.cinquin.mutinack.misc_util.ComparablePair;
import uk.org.cinquin.mutinack.misc_util.Handle;
import uk.org.cinquin.mutinack.misc_util.IntMinMax;
import uk.org.cinquin.mutinack.misc_util.ObjMinMax;
import uk.org.cinquin.mutinack.misc_util.Pair;
import uk.org.cinquin.mutinack.misc_util.SettableInteger;
import uk.org.cinquin.mutinack.misc_util.Util;
import uk.org.cinquin.mutinack.misc_util.collections.PositionAssayToQualityMap;
import uk.org.cinquin.mutinack.misc_util.exceptions.AssertionFailedException;
import uk.org.cinquin.mutinack.output.CrossSampleLocationAnalysis;
import uk.org.cinquin.mutinack.output.LocationAnalysis;
import uk.org.cinquin.mutinack.output.LocationExaminationResults;
import uk.org.cinquin.mutinack.qualities.Quality;
import uk.org.cinquin.mutinack.sequence_IO.TrimOverlappingReads;
import uk.org.cinquin.mutinack.statistics.Histogram;

/**
 * Phaser that coordinates a set of {@code SubAnalyzer}s working through one
 * contig chunk: each {@link #onAdvance} round loads reads up to a target
 * position, examines every position across all sub-analyzers/parameter sets,
 * cross-compares the per-sample results, and prunes per-position caches.
 * The phaser terminates when no sub-analyzer can advance further.
 */
public class SubAnalyzerPhaser extends Phaser {

	private static final Logger logger = LoggerFactory.getLogger("SubAnalyzerPhaser");
	private static final byte[] QUESTION_MARK = {'?'};

	private final @NonNull AnalysisChunk analysisChunk;
	private final @NonNull MutinackGroup groupSettings;
	// Last processable position seen in the previous round; used to detect
	// lack of progress and thus termination.
	private final @NonNull SettableInteger previousLastProcessable;
	// Positions at which output is forced; a true value marks the position as
	// randomly selected (see onAdvance1).
	private final @NonNull Map<SequenceLocation, Boolean> forceOutputAtLocations;
	private final @NonNull Histogram dubiousOrGoodDuplexCovInAllInputs;
	private final @NonNull Histogram goodDuplexCovInAllInputs;
	private final @NonNull Parameters param;
	// BED regions to skip entirely / to flag as repetitive.
	private final @NonNull List<GenomeFeatureTester> excludeBEDs;
	private final @NonNull List<@NonNull BedReader> repetitiveBEDs;
	private final int contigIndex;
	private final @NonNull String contigName;
	// Number of positions to advance pauseAtPosition by each round.
	private final int PROCESSING_CHUNK;
	private final boolean outputReads;
	private final int nSubAnalyzers;
	private int nIterations = 0;
	// Running duplex number written to the output BAM; periodically reset.
	private final @NonNull AtomicInteger dn = new AtomicInteger(0);

	/** Stores the supplied collaborators; no work is performed here. */
	public SubAnalyzerPhaser(@NonNull Parameters param,
			@NonNull AnalysisChunk analysisChunk,
			boolean outputReads,
			@NonNull Map<SequenceLocation, Boolean> forceOutputAtLocations,
			@NonNull Histogram dubiousOrGoodDuplexCovInAllInputs,
			@NonNull Histogram goodDuplexCovInAllInputs,
			@NonNull String contigName,
			int contigIndex,
			@NonNull List<GenomeFeatureTester> excludeBEDs,
			@NonNull List<@NonNull BedReader> repetitiveBEDs,
			int PROCESSING_CHUNK) {
		this.param = param;
		this.analysisChunk = analysisChunk;
		this.groupSettings = analysisChunk.groupSettings;
		previousLastProcessable = new SettableInteger(-1);
		this.outputReads = outputReads;
		this.dubiousOrGoodDuplexCovInAllInputs = dubiousOrGoodDuplexCovInAllInputs;
		this.goodDuplexCovInAllInputs = goodDuplexCovInAllInputs;
		this.forceOutputAtLocations = forceOutputAtLocations;
		nSubAnalyzers = analysisChunk.subAnalyzers.size();
		this.contigIndex = contigIndex;
		this.contigName = contigName;
		this.excludeBEDs = excludeBEDs;
		this.repetitiveBEDs = repetitiveBEDs;
		this.PROCESSING_CHUNK = PROCESSING_CHUNK;
	}

	/**
	 * Runs one analysis round over positions
	 * (lastProcessedPosition, min(pauseAtPosition, terminateAtPosition)].
	 * Returns true (terminating the phaser) when no sub-analyzer advanced
	 * past the previous round's last processable position.
	 */
	@Override
	protected final boolean onAdvance(final int phase, final int registeredParties) {
		//This is the place to make comparisons between analyzer results

		if (dn.get() >= 1_000) {
			dn.set(0);//Reset duplex number written in output BAM to 0, just so it stays
			//within a reasonable range (at the cost of numbers not being unique across
			//the file).
		}

		final boolean returnValue;
		boolean completedNormally = false;
		try {
			final int saveLastProcessedPosition = analysisChunk.lastProcessedPosition;
			// One pass per parameter set; positions are re-walked from the same
			// starting point for each set.
			for (int statsIndex = 0; statsIndex < analysisChunk.nParameterSets; statsIndex++) {
				Boolean insertion = null;
				for (SubAnalyzer subAnalyzer: analysisChunk.subAnalyzers) {
					subAnalyzer.stats = subAnalyzer.analyzer.stats.get(statsIndex);
					subAnalyzer.param = Objects.requireNonNull(subAnalyzer.stats.analysisParameters);
					// All sub-analyzers must agree on whether this pass is for insertions.
					if (insertion == null) {
						insertion = subAnalyzer.stats.forInsertions;
					} else {
						Assert.isTrue(subAnalyzer.stats.forInsertions == insertion);
					}
				}
				final int targetStopPosition = Math.min(analysisChunk.pauseAtPosition,
					analysisChunk.terminateAtPosition);
				Assert.isTrue(analysisChunk.pauseAtPosition >= saveLastProcessedPosition);
				analysisChunk.lastProcessedPosition = saveLastProcessedPosition;
				// Load reads for the upcoming position range (skippable for
				// later parameter sets when duplex loading can be reused).
				for (int i = 0; i < nSubAnalyzers; i++) {
					SubAnalyzer sub = analysisChunk.subAnalyzers.get(i);
					if (NONTRIVIAL_ASSERTIONS && nIterations > 1 && sub.candidateSequences.containsKey(
							new SequenceLocation(contigIndex, contigName, analysisChunk.lastProcessedPosition))) {
						throw new AssertionFailedException();
					}
					if (saveLastProcessedPosition + 1 <= targetStopPosition &&
							(statsIndex == 0 || !sub.stats.canSkipDuplexLoading)) {
						sub.load(saveLastProcessedPosition + 1, targetStopPosition);
					}
				}
				// Walk each position, skipping excluded BED regions.
				outer:
				for (int position = saveLastProcessedPosition + 1; position <= targetStopPosition
						&& !groupSettings.terminateAnalysis; position ++) {
					final @NonNull SequenceLocation location =
						new SequenceLocation(contigIndex, contigName, position, Objects.requireNonNull(insertion));
					for (GenomeFeatureTester tester: excludeBEDs) {
						if (tester.test(location)) {
							analysisChunk.subAnalyzers.forEach(sa -> {
								sa.candidateSequences.remove(location);
								sa.stats.nPosExcluded.add(location, 1); });
							analysisChunk.lastProcessedPosition = position;
							continue outer;
						}
					}

					onAdvance1(location);
					analysisChunk.lastProcessedPosition = position;
					if (outputReads && statsIndex == 0) {//Only output reads once; note
						//however that different parameter sets may lead to different duplex
						//grouping, which will not be apparent in BAM output
						final @NonNull SequenceLocation locationNoPH =
							new SequenceLocation(contigIndex, contigName, position, false);
						prepareReadsToWrite(
							locationNoPH,
							analysisChunk,
							param.collapseFilteredReads,
							param.writeBothStrands,
							param.clipPairOverlap,
							dn);
					}
				}
			}

			if (ENABLE_TRACE && shouldLog(TRACE)) {
				logger.trace("Going from " + saveLastProcessedPosition + " to " +
					analysisChunk.lastProcessedPosition + " for chunk " + analysisChunk);
			}

			// Prune per-position caches now that positions up to
			// lastProcessedPosition are fully handled.
			analysisChunk.subAnalyzersParallel.forEach(subAnalyzer -> {
				final int localLastProcessedPosition = analysisChunk.lastProcessedPosition;
				subAnalyzer.candidateSequences.retainEntries((key, value) ->
					key.position > localLastProcessedPosition);
				if (shouldLog(TRACE)) {
					logger.trace("SubAnalyzer " + analysisChunk + " completed " +
						(saveLastProcessedPosition + 1) + " to " + analysisChunk.lastProcessedPosition);
				}

				final int localPauseAt = analysisChunk.pauseAtPosition;
				final int maxInsertSize = param.maxInsertSize;
				// Drop cached reads whose alignment can no longer overlap
				// positions still to be processed.
				subAnalyzer.extSAMCache.retainEntries((key, val) ->
					{
						final boolean keep = val.getAlignmentStart() + maxInsertSize > localPauseAt;
						if (!keep) {
							val.discarded = true;
						}
						return keep;
					});
				if (outputReads) {
					subAnalyzer.writeOutputReads();
				}
			});//End loop over subAnalyzers

			if (nIterations < 2) {
				nIterations++;
				final SequenceLocation lowerBound =
					new SequenceLocation(contigIndex, contigName, analysisChunk.lastProcessedPosition);
				analysisChunk.subAnalyzers.forEach(subAnalyzer ->
					subAnalyzer.candidateSequences.retainEntries((key, val) -> {
						Assert.isTrue(key.contigIndex == contigIndex,
							"Problem with contig indices, " + key + ' ' + key.contigIndex +
							' ' + contigIndex);
						return key.compareTo(lowerBound) >= 0; }));
			}

			Assert.noException(() -> {
				//Check no sequences have been left behind
				analysisChunk.subAnalyzersParallel.forEach(subAnalyzer -> {
					final SequenceLocation lowerBound =
						new SequenceLocation(contigIndex, contigName, analysisChunk.lastProcessedPosition);
					subAnalyzer.candidateSequences.forEach(
						e -> {
							Assert.isFalse(e.contigIndex != contigIndex);
							Assert.isFalse(e.compareTo(lowerBound) < 0/*,
								"pauseAt: %s; lastProcessedPosition: %s but found: %s for chunk %s",
								pauseAt.get(), lastProcessedPosition, e, analysisChunk*/);});
				});
			});

			// Progress check: if nothing new became processable since the last
			// round, terminate the phaser; otherwise move the pause target forward.
			final int maxLastProcessable = new IntMinMax<SubAnalyzer>().
				acceptMax(analysisChunk.subAnalyzers,
					sa -> ((SubAnalyzer) sa).lastProcessablePosition.get()).getMax();

			if (maxLastProcessable == previousLastProcessable.get()) {
				logger.debug("Phaser " + this + " will terminate");
				returnValue = true;
			} else {
				previousLastProcessable.set(maxLastProcessable);
				analysisChunk.pauseAtPosition = maxLastProcessable + PROCESSING_CHUNK;
				returnValue = false;
			}

			completedNormally = true;
		} finally {
			if (completedNormally) {
				// Restore parameter set 0 for the next phase.
				for (SubAnalyzer subAnalyzer: analysisChunk.subAnalyzers) {
					subAnalyzer.stats = subAnalyzer.analyzer.stats.get(0);
					subAnalyzer.param = Objects.requireNonNull(subAnalyzer.stats.analysisParameters);
				}
			} else {
				forceTermination();
			}
		}
		return returnValue;
	}//End onAdvance

	/**
	 * Examines a single position across all sub-analyzers, aggregates
	 * cross-sample coverage and candidate-quality statistics, and triggers
	 * candidate reporting when a good-quality mutant candidate is present or
	 * output is forced (annotation request, low top-allele frequency, or a
	 * forced/randomly selected location).
	 */
	private void onAdvance1(
			final @NonNull SequenceLocation location
		) {
		final @NonNull MutableMap<SubAnalyzer, LocationExaminationResults>
			locationExamResultsMap0 = new UnifiedMap<>();

		//Fill the map so that at next step values can be modified concurrently
		//without causing structural modifications, obviating the need for synchronization
		analysisChunk.subAnalyzers.forEach(sa -> locationExamResultsMap0.put(sa, null));

		// Examine the location in parallel, one result per sub-analyzer.
		analysisChunk.subAnalyzersParallel.forEach(sa -> {
			LocationExaminationResults results = sa.examineLocation(location);
			if (NONTRIVIAL_ASSERTIONS) {
				for (CandidateSequence c: results.analyzedCandidateSequences) {
					Assert.isTrue(c.getOwningAnalyzer() == sa.analyzer);
				}
			}
			locationExamResultsMap0.put(sa, results);
		});

		@SuppressWarnings("null")
		final @NonNull MutableMap<SubAnalyzer, @NonNull LocationExaminationResults>
			locationExamResultsMap = param.enableCostlyAssertions ?
				locationExamResultsMap0.asUnmodifiable()
			:
				locationExamResultsMap0;

		final ListIterable<@NonNull LocationExaminationResults> locationExamResults =
			Lists.immutable.withAll(locationExamResultsMap.values());

		// Minimum coverage across all inputs at this position.
		final int dubiousOrGoodInAllInputsAtPos = new IntMinMax<LocationExaminationResults>().
			acceptMin(locationExamResults,
				ler -> ((LocationExaminationResults) ler).nGoodOrDubiousDuplexes).
			getMin();

		final int goodDuplexCovInAllInputsAtPos = new IntMinMax<LocationExaminationResults>().
			acceptMin(locationExamResults,
				ler -> ((LocationExaminationResults) ler).nGoodDuplexes).
			getMin();

		dubiousOrGoodDuplexCovInAllInputs.insert(dubiousOrGoodInAllInputsAtPos);
		goodDuplexCovInAllInputs.insert(goodDuplexCovInAllInputsAtPos);

		final Handle<Boolean> mutationToAnnotate = new Handle<>(false);

		// For each sample, total the good-or-dubious duplexes of all *other*
		// samples (sister-sample coverage).
		locationExamResultsMap.forEachKeyValue((sa, ler) -> {
			SettableInteger sum = new SettableInteger(0);
			locationExamResultsMap.forEachKeyValue((k , v) -> {if (k != sa)
				sum.addAndGet(v.nGoodOrDubiousDuplexes);});
			ler.nGoodOrDubiousDuplexesSisterSamples = sum.get();
			ler.analyzedCandidateSequences.each(c -> c.setnDuplexesSisterSamples(sum.get()));
		});

		// Reset per-position bookkeeping flags, then register coverage.
		analysisChunk.subAnalyzers.forEach(
			sa -> {
				sa.incrementednPosDuplexQualityQ2OthersQ1Q2 = false;
				sa.c1 = false; sa.c2 = false; sa.c3 = false; sa.c4 = false; sa.processed = false;
				registerAndAnalyzeCoverage(
					Objects.requireNonNull(locationExamResultsMap.get(sa)),
					mutationToAnnotate,
					Objects.requireNonNull(sa.stats),
					location,
					locationExamResultsMap,
					sa.analyzer,
					groupSettings.mutationsToAnnotate,
					sa.analyzer.codingStrandTester,
					repetitiveBEDs);
			}
		);

		RichIterable<CandidateSequence> mutantCandidates = locationExamResults.
			flatCollect(c -> c.analyzedCandidateSequences).
			select(c -> {
				boolean isMutant = !c.getMutationType().isWildtype();
				return isMutant;
			});

		// Best quality among reportable mutant candidates; ATROCIOUS if none.
		final Quality maxCandMutQuality = Objects.requireNonNull(new ObjMinMax<>
				(Quality.ATROCIOUS, Quality.ATROCIOUS, Quality::compareTo).
			acceptMax(mutantCandidates, c -> {
				CandidateSequence c0 = (CandidateSequence) c;
				return c0.getMutationType().reportable() ? c0.getQuality().getValue() : Quality.ATROCIOUS; }).
			getMax());

		// Smallest top-allele frequency across inputs (NaN/absent -> MAX_VALUE).
		@SuppressWarnings("null")//getMin cannot return null because constructor with initial min is used
		final float minTopAlleleFreq = new ObjMinMax<>(Float.MAX_VALUE, - Float.MAX_VALUE, Float::compareTo).
			acceptMin(locationExamResults, cl -> {
				MutableFloatList freq = ((LocationExaminationResults) cl).alleleFrequencies;
				if (freq != null) {
					float f = ((LocationExaminationResults) cl).alleleFrequencies.getLast();
					return Float.isNaN(f) ? Float.MAX_VALUE : f;
				} else {
					return Float.MAX_VALUE;
				}
			}).getMin();

		final boolean lowTopAlleleFreq = minTopAlleleFreq != 0 && minTopAlleleFreq < param.topAlleleFreqReport;

		locationExamResults.flatCollect(c -> c.analyzedCandidateSequences).forEach(candidate -> {
			if (candidate.getQuality().getNonNullValue().atLeast(Quality.GOOD)) {
				final @NonNull AnalysisStats stats = Objects.requireNonNull(
					candidate.getOwningSubAnalyzer().stats);
				candidate.computeNQ1PlusConcurringDuplexes(stats.concurringDuplexDistance, param);
			}
		});

		// Reporting is forced by an explicit location request, an annotation
		// request, or a low top-allele frequency.
		final boolean forceReporting = forceOutputAtLocations.get(location) != null ||
			mutationToAnnotate.get() || lowTopAlleleFreq;
		@SuppressWarnings("null")
		final boolean randomlySelected = forceOutputAtLocations.get(location) != null &&
			forceOutputAtLocations.get(location);

		if (forceReporting || maxCandMutQuality.atLeast(GOOD)) {
			if (NONTRIVIAL_ASSERTIONS) {
				for (GenomeFeatureTester t: excludeBEDs) {
					Assert.isFalse(t.test(location), "%s excluded by %s"/*, location, t*/);
				}
			}
			processAndReportCandidates(analysisChunk.subAnalyzers.get(0).stats.analysisParameters,
				locationExamResults, locationExamResultsMap, location,
				randomlySelected, lowTopAlleleFreq, true, repetitiveBEDs, analysisChunk,
				groupSettings.mutationsToAnnotate);
		}
	}

	// Used to compute candidate quality while ignoring the sister-sample assay.
	private final static Set<PositionAssay> SISTER_SAMPLE_ASSAY_SET = Collections.singleton(
		PositionAssay.PRESENT_IN_SISTER_SAMPLE);

	private static void processAndReportCandidates(
			final @NonNull Parameters param,
			final @NonNull ListIterable<@NonNull LocationExaminationResults> locationExamResults,
			final @NonNull MutableMap<SubAnalyzer, @NonNull LocationExaminationResults> locationExamResultsMap,
			final @NonNull SequenceLocation location,
			final boolean randomlySelected,
			final boolean lowTopAlleleFreq,
			final boolean doOutput,
			final @NonNull List<@NonNull BedReader> repetitiveBEDs,
			final @NonNull AnalysisChunk analysisChunk,
			final @NonNull ConcurrentMap<Pair<SequenceLocation, String>, @NonNull List<@NonNull Pair<@NonNull Mutation, @NonNull
String>>> mutationsToAnnotate ) { final MutableList<CandidateSequence> candidateSequences = locationExamResults. flatCollect(l -> l.analyzedCandidateSequences, new FastList<>()). sortThis(Comparator.comparing(CandidateSequence::getMutationType)); //Refilter also allowing Q1 candidates to compare output of different //analyzers final MutableList<CandidateSequence> allQ1Q2Candidates = candidateSequences. select(c -> { Assert.isTrue(c.getLocation().distanceOnSameContig(location) == 0); return c.getQuality().getNonNullValue().greaterThan(POOR) && !c.isHidden(); }, new FastList<>()); final RichIterable<@NonNull DuplexDisagreement> allQ2DuplexDisagreements = //Distinct disagreements locationExamResults. flatCollect(ler -> ler.disagreements.keySet()). select(d -> d.quality.atLeast(GOOD), Sets.mutable.empty());//Filtering is a NOP right now since all disags are Q2 final CrossSampleLocationAnalysis csla = new CrossSampleLocationAnalysis(location); csla.randomlySelected = randomlySelected; csla.lowTopAlleleFreq = lowTopAlleleFreq; if (allQ1Q2Candidates.noneSatisfy(c -> c.getMutationType().isWildtype())) { csla.noWt = true; } MutableSet<CandidateSequence> distinctCandidates = candidateSequences.select(c -> !c.isHidden() && (c.getQuality().getNonNullValue().greaterThan(POOR) || c.getQuality().qualitiesContain(DISAG_THAT_MISSED_Q2)), Sets.mutable.empty()); //Collect into a set to get uniqueness PositionAssayToQualityMap extraPositionQualities = new PositionAssayToQualityMap(); distinctCandidates.each(candidate -> { final int candidateCount = allQ1Q2Candidates.count(c -> c.equals(candidate)); if (!candidate.getMutationType().isWildtype() && candidateCount > 1) { extraPositionQualities.put(PositionAssay.PRESENT_IN_SISTER_SAMPLE, Quality.DUBIOUS); } }); final boolean multipleQ2Mutations = distinctCandidates. 
count(c -> !c.getMutationType().isWildtype() && //Following doesn't do anything at this point since PRESENT_IN_SISTER_SAMPLE has not been set c.getQuality().getValueIgnoring(SISTER_SAMPLE_ASSAY_SET).atLeast(GOOD)) > 1; if (multipleQ2Mutations) { csla.multipleQ2MutantsAtSamePos = true; extraPositionQualities.put(PositionAssay.MULTIPLE_Q2_MUT_AT_POS, Quality.DUBIOUS); } if (!extraPositionQualities.isEmpty()) { candidateSequences.forEach(c -> extraPositionQualities.forEach((a, q) -> c.getQuality().addUnique(a, q))); } distinctCandidates.each(Procedures.throwing(candidate -> { final int candidateCount = allQ1Q2Candidates.count(c -> c.equals(candidate)); if (!candidate.getMutationType().isWildtype() && allQ1Q2Candidates.count(c -> c.equals(candidate) && (c.getQuality().getNonNullValue().atLeast(GOOD))) >= 2) { csla.twoOrMoreSamplesWithSameQ2MutationCandidate = true; } else if (candidateCount == 1 &&//Mutant candidate shows up only once (and therefore in only 1 analyzer) !candidate.getMutationType().isWildtype() && candidate.getQuality().getNonNullValue().atLeast(GOOD) && (!param.candidateQ2Criterion.equals("1Q2Duplex") || candidate.getnGoodDuplexes() >= param.minQ2DuplexesToCallMutation) && candidate.getnGoodOrDubiousDuplexes() >= param.minQ1Q2DuplexesToCallMutation && candidate.getQuality().downgradeUniqueIfFalse(TOO_HIGH_COVERAGE, !locationExamResultsMap.get(candidate.getOwningSubAnalyzer()).tooHighCoverage) && candidate.getQuality().downgradeUniqueIfFalse(MIN_DUPLEXES_SISTER_SAMPLE, candidate.getnDuplexesSisterSamples() >= param.minNumberDuplexesSisterSamples) && (!param.Q2DisagCapsMatchingMutationQuality || (//Disagreements from the same sample will have already-downgraded mutation quality, //if analysis parameter dictates it, but check again in case a matching disagreement is //present in another sample candidate.getQuality().downgradeIfFalse(AT_LEAST_ONE_DISAG, !allQ2DuplexDisagreements.anySatisfy( disag -> disag.snd.equals(candidate.getMutation()) || 
disag.fst.equals(candidate.getMutation()))) ) ) ) { final @NonNull AnalysisStats stats = Objects.requireNonNull( candidate.getOwningSubAnalyzer().stats); SubAnalyzer sa0 = candidate.getOwningSubAnalyzer(); if (!sa0.incrementednPosDuplexQualityQ2OthersQ1Q2) { @NonNull LocationExaminationResults ler = locationExamResultsMap.get(candidate.getOwningSubAnalyzer()); boolean c1 = ler.nGoodDuplexes >= stats.analysisParameters.minQ2DuplexesToCallMutation; boolean c2 = ler.nGoodOrDubiousDuplexes >= stats.analysisParameters.minQ1Q2DuplexesToCallMutation; boolean c3 = ler.nGoodOrDubiousDuplexesSisterSamples >= stats.analysisParameters.minNumberDuplexesSisterSamples; throw new AssertionFailedException(c1 + " " + c2 + " " + c3 + " --- " + sa0.processed + " " + sa0.c1 + " " + sa0.c2 + " " + sa0.c3 + " " + sa0.c4); } csla.nDuplexesUniqueQ2MutationCandidate.add( candidate.computeNQ1PlusConcurringDuplexes(stats.concurringMutationDuplexDistance, param)); candidate.setGoodCandidateForUniqueMutation(true); stats.nPosCandidatesForUniqueMutation.accept(location, candidate.getnGoodDuplexes()); stats.uniqueMutantQ2CandidateQ1Q2DCoverage.insert(candidate.getTotalGoodOrDubiousDuplexes()); if (!repetitiveBEDs.isEmpty()) { boolean repetitive = false; for (GenomeFeatureTester t: repetitiveBEDs) { if (t.test(location)) { repetitive = true; break; } } if (repetitive) { stats.uniqueMutantQ2CandidateQ1Q2DCoverageRepetitive.insert(candidate.getTotalGoodOrDubiousDuplexes()); } else { stats.uniqueMutantQ2CandidateQ1Q2DCoverageNonRepetitive.insert(candidate.getTotalGoodOrDubiousDuplexes()); } } analysisChunk.subAnalyzers.select(sa -> Objects.requireNonNull(locationExamResultsMap.get(sa)). analyzedCandidateSequences.contains(candidate)). 
forEach(sa -> { final LocationExaminationResults examResults = Objects.requireNonNull(locationExamResultsMap.get(sa)); final AnalysisStats stats0 = sa.stats; stats0.nReadsAtPosWithSomeCandidateForQ2UniqueMutation.insert( (int) examResults.analyzedCandidateSequences.sumOfInt( c -> c.getNonMutableConcurringReads().size())); stats0.nQ1Q2AtPosWithSomeCandidateForQ2UniqueMutation.insert( (int) examResults.analyzedCandidateSequences. sumOfInt(CandidateSequence::getnGoodOrDubiousDuplexes)); }); }//End Q2 candidate boolean oneSampleNoWt = false; for (LocationExaminationResults results: locationExamResults) { if (results.analyzedCandidateSequences. noneSatisfy(c -> c.getMutationType().isWildtype() && c.getnGoodOrDubiousDuplexes() > 0)) { oneSampleNoWt = true; break; } } csla.oneSampleNoWt = oneSampleNoWt; final String baseOutput = csla.toString() + '\t' + location + '\t' + candidate.getKind() + '\t' + (!candidate.getMutationType().isWildtype() ? candidate.getChange() : ""); //Now output information for the candidate for each analyzer //(unless no reads matched the candidate) for (@NonNull SubAnalyzer sa: analysisChunk.subAnalyzers) { final @NonNull LocationExaminationResults examResults = Objects.requireNonNull(locationExamResultsMap.get(sa)); final List<CandidateSequence> l = examResults.analyzedCandidateSequences. select(c -> c.equals(candidate)).toList(); final CandidateSequence matchingSACandidate; final int nCandidates = l.size(); if (nCandidates > 1) { throw new AssertionFailedException(); } else if (nCandidates == 0) { //Analyzer does not have matching candidate (i.e. it did not get //any reads matching the mutation) continue; } else {//1 candidate matchingSACandidate = l.get(0); matchingSACandidate.setnMatchingCandidatesOtherSamples(candidateCount); } if (!sa.stats.detections.computeIfAbsent(location, loc -> new LocationAnalysis(csla, Util.serializeAndDeserialize(examResults))). 
setCrossSampleLocationAnalysis(csla).candidates.add( Util.serializeAndDeserialize(matchingSACandidate))) { throw new AssertionFailedException(); } final Pair<SequenceLocation, String> fullLocation = new Pair<>(location, sa.analyzer.name); @Nullable List<@NonNull Pair<@NonNull Mutation, @NonNull String>> toAnnotateList = mutationsToAnnotate.get(fullLocation); if (toAnnotateList != null) { for (Pair<Mutation, String> toAnnotate : toAnnotateList) { final Mutation mut; if ((mut = toAnnotate.fst).mutationType. equals(matchingSACandidate.getMutationType()) && Arrays.equals(mut.mutationSequence, matchingSACandidate.getSequence())) { matchingSACandidate.setPreexistingDetection(toAnnotate.snd); } } } matchingSACandidate.recordMatchingGenomeIntervals(sa.analyzer.filtersForCandidateReporting); if (doOutput && (sa.stats.detectionOutputStream != null || sa.stats.annotationOutputStream != null)) { outputCandidate(sa.analyzer, matchingSACandidate, location, sa.stats, csla.toString(), baseOutput, examResults); } }//End loop over subAnalyzers }));//End loop over mutation candidates } private static void outputCandidate( final @NonNull Mutinack analyzer, final @NonNull CandidateSequence candidate, final @NonNull SequenceLocation location, final @NonNull AnalysisStats stats, final @NonNull String baseOutput0, final @NonNull String baseOutput, final @NonNull LocationExaminationResults examResults ) throws IOException { final String line = baseOutput + '\t' + analyzer.name + '\t' + candidate.toOutputString(stats.analysisParameters, examResults); if (stats.detectionOutputStream != null) { stats.detectionOutputStream.println(line); } try { final @Nullable OutputStreamWriter ambw = stats.mutationBEDWriter; if (ambw != null) { ambw.append(location.getContigName() + '\t' + (location.position + 1) + '\t' + (location.position + 1) + '\t' + candidate.getKind() + '\t' + baseOutput0 + '\t' + candidate.getnGoodDuplexes() + '\n'); } } catch (IOException e) { throw new RuntimeException(e); } if 
(candidate.getPreexistingDetection() != null) { stats.annotationOutputStream.append(candidate.getPreexistingDetection() + "\t" + line + '\n'); } } private static float nanTo99(float f) { return Float.isNaN(f) ? 9.9f : f; } private static void registerAndAnalyzeCoverage( final @NonNull LocationExaminationResults examResults, final @NonNull Handle<Boolean> mutationToAnnotate, final @NonNull AnalysisStats stats, final @NonNull SequenceLocation location, final @NonNull MutableMap<SubAnalyzer, @NonNull LocationExaminationResults> analyzerCandidateLists, final @NonNull Mutinack a, final @NonNull ConcurrentMap<Pair<SequenceLocation, String>, @NonNull List<@NonNull Pair<@NonNull Mutation, @NonNull String>>> mutationsToAnnotate, final @Nullable GenomeFeatureTester codingStrandTester, final @NonNull List<BedReader> repetitiveBEDs ) { if (mutationsToAnnotate.containsKey(new Pair<>(location, a.name))) { mutationToAnnotate.set(true); } final @Nullable OutputStreamWriter cbw = stats.coverageBEDWriter; if (cbw != null) { try { cbw.append(location.getContigName() + '\t' + (location.position + 1) + '\t' + (location.position + 1) + '\t' + examResults.nGoodOrDubiousDuplexes + '\n'); } catch (IOException e) { throw new RuntimeException(e); } } if (stats.positionByPositionCoverage != null) { int[] array = Objects.requireNonNull(stats.positionByPositionCoverage.get( location.getContigName())); if (array.length <= location.position) { throw new IllegalArgumentException("Position goes beyond end of contig " + location.getContigName() + ": " + location.position + " vs " + array.length); } else { array[location.position] += examResults.nGoodOrDubiousDuplexes; } } MutableFloatList alleleFrequencies = examResults.alleleFrequencies; if (alleleFrequencies != null) { List<Integer> freq = new FastList<>(2); freq.add((int) (10f * nanTo99(alleleFrequencies.get(alleleFrequencies.size() - 2)))); freq.add((int) (10f * nanTo99(alleleFrequencies.getLast()))); stats.alleleFrequencies.accept(location, 
freq); } examResults.analyzedCandidateSequences.each(c -> { if (c.getQuality().getNonNullValue().atLeast(GOOD)) { if (c.getMutationType().isWildtype()) { stats.wtQ2CandidateQ1Q2Coverage.insert(examResults.nGoodOrDubiousDuplexes); if (!repetitiveBEDs.isEmpty()) { boolean repetitive = false; for (GenomeFeatureTester t: repetitiveBEDs) { if (t.test(location)) { repetitive = true; break; } } if (repetitive) { stats.wtQ2CandidateQ1Q2CoverageRepetitive.insert(examResults.nGoodOrDubiousDuplexes); } else { stats.wtQ2CandidateQ1Q2CoverageNonRepetitive.insert(examResults.nGoodOrDubiousDuplexes); } } } else { stats.mutantQ2CandidateQ1Q2Coverage.insert(examResults.nGoodOrDubiousDuplexes); if (!repetitiveBEDs.isEmpty()) { boolean repetitive = false; for (GenomeFeatureTester t: repetitiveBEDs) { if (t.test(location)) { repetitive = true; break; } } if (repetitive) { stats.mutantQ2CandidateQ1Q2DCoverageRepetitive.insert(examResults.nGoodOrDubiousDuplexes); } else { stats.mutantQ2CandidateQ1Q2DCoverageNonRepetitive.insert(examResults.nGoodOrDubiousDuplexes); } } } } }); final boolean localTooHighCoverage = examResults.nGoodOrDubiousDuplexes > a.maxNDuplexes; examResults.tooHighCoverage = localTooHighCoverage; analyzerCandidateLists.forEachKey(sa -> { sa.c1 = localTooHighCoverage; sa.processed = true; }); if (localTooHighCoverage) { stats.nPosIgnoredBecauseTooHighCoverage.increment(location); } if (!localTooHighCoverage) { //a.stats.nPosDuplexesCandidatesForDisagreementQ2.accept(location, examResults.nGoodDuplexesIgnoringDisag); registerOutputDisagreements(examResults, stats, location); } else { stats.nPosDuplexCandidatesForDisagreementQ2TooHighCoverage.accept(location, examResults.nGoodDuplexesIgnoringDisag); for (@NonNull DuplexDisagreement d: examResults.disagreements.keys()) { stats.topBottomDisagreementsQ2TooHighCoverage.accept(location, d); } } analyzerCandidateLists.forEachKey(sa -> { sa.c2 = !stats.analysisParameters.candidateQ2Criterion.equals("1Q2Duplex") ||//XXX Needs 
more work examResults.nGoodDuplexes >= stats.analysisParameters.minQ2DuplexesToCallMutation; sa.c3 = examResults.nGoodOrDubiousDuplexes >= stats.analysisParameters.minQ1Q2DuplexesToCallMutation; sa.c4 = examResults.nGoodOrDubiousDuplexesSisterSamples >= stats.analysisParameters.minNumberDuplexesSisterSamples; }); if ((!localTooHighCoverage) && (!stats.analysisParameters.candidateQ2Criterion.equals("1Q2Duplex") ||//XXX Needs more work examResults.nGoodDuplexes >= stats.analysisParameters.minQ2DuplexesToCallMutation) && examResults.nGoodOrDubiousDuplexes >= stats.analysisParameters.minQ1Q2DuplexesToCallMutation && examResults.nGoodOrDubiousDuplexesSisterSamples >= stats.analysisParameters.minNumberDuplexesSisterSamples ) { examResults.analyzedCandidateSequences.select(c -> !c.isHidden()). flatCollect(CandidateSequence::getDuplexes). collectIf(dr -> dr.localAndGlobalQuality.getNonNullValue().atLeast(GOOD), DuplexRead::getMaxDistanceToLigSite). forEach(i -> {if (i != Integer.MIN_VALUE && i != Integer.MAX_VALUE) stats.crossAnalyzerQ2CandidateDistanceToLigationSite.insert(i);}); analyzerCandidateLists.forEachKey(sa -> { sa.incrementednPosDuplexQualityQ2OthersQ1Q2 = true; }); stats.nPosDuplexQualityQ2OthersQ1Q2.accept(location, examResults.nGoodDuplexes); stats.nPosQualityQ2OthersQ1Q2.increment(location); if (codingStrandTester != null && codingStrandTester.getNegativeStrand(location).isPresent()) { stats.nPosDuplexQualityQ2OthersQ1Q2CodingOrTemplate.accept(location, examResults.nGoodDuplexes); } //XXX The following includes all candidates at *all* positions considered in //processing chunk stats.nReadsAtPosQualityQ2OthersQ1Q2.insert( (int) examResults.analyzedCandidateSequences.sumOfInt( c -> c.getNonMutableConcurringReads().size())); stats.nQ1Q2AtPosQualityQ2OthersQ1Q2.insert( (int) examResults.analyzedCandidateSequences.sumOfInt( CandidateSequence::getnGoodOrDubiousDuplexes)); if (stats.analysisParameters.variableBarcodeLength == 0) { 
stats.duplexCollisionProbabilityAtQ2.insert((int) (examResults.probAtLeastOneCollision * 1_000d)); } } } private static void registerOutputDisagreements( final @NonNull LocationExaminationResults examResults, final @NonNull AnalysisStats stats, final @NonNull SequenceLocation location) { for (@NonNull ComparablePair<String, String> var: examResults.rawMismatchesQ2) { stats.rawMismatchesQ2.accept(location, var); } for (@NonNull ComparablePair<String, String> var: examResults.rawDeletionsQ2) { stats.rawDeletionsQ2.accept(location, var); stats.rawDeletionLengthQ2.insert(var.snd.length()); } for (@NonNull ComparablePair<String, String> var: examResults.rawInsertionsQ2) { stats.rawInsertionsQ2.accept(location, var); stats.rawInsertionLengthQ2.insert(var.snd.length()); } for (Entry<DuplexDisagreement, List<DuplexRead>> entry: examResults.disagreements) { DuplexDisagreement d = entry.getKey(); if (!stats.detections.computeIfAbsent(location, loc -> new LocationAnalysis(null, Util.serializeAndDeserialize(examResults))). disagreements.add(d)) { throw new AssertionFailedException(); } if (stats.analysisParameters.variableBarcodeLength == 0) { stats.duplexCollisionProbabilityLocalAvAtDisag.insert( (int) (examResults.probAtLeastOneCollision * 1_000d)); stats.duplexCollisionProbabilityAtDisag.insert( (int) (d.probCollision * 1_000d)); } byte[] fstSeq = d.getFst() == null ? null : d.getFst().getSequence(); if (fstSeq == null) { fstSeq = QUESTION_MARK; } byte[] sndSeq = d.getSnd().getSequence(); if (sndSeq == null) { sndSeq = QUESTION_MARK; } final Mutation mutant = d.getSnd(); try { @SuppressWarnings("resource") @Nullable final OutputStreamWriter tpdw = d.hasAWtStrand ? stats.topBottomDisagreementWriter : stats.noWtDisagreementWriter; if (tpdw != null) { NumberFormat formatter = mediumLengthFloatFormatter.get(); tpdw.append(location.getContigName() + '\t' + (location.position + 1) + '\t' + (location.position + 1) + '\t' + (mutant.mutationType == SUBSTITUTION ? 
(new String(fstSeq) + "" + new String(sndSeq)) : new String (sndSeq)) + '\t' + mutant.mutationType + '\t' + (d.hasAWtStrand ? "" : (d.getFst() != null ? d.getFst().mutationType : "-")) + '\t' + examResults.duplexInsertSize10thP + '\t' + examResults.duplexInsertSize90thP + '\t' + formatter.format(examResults.alleleFrequencies.get(0)) + '\t' + formatter.format(examResults.alleleFrequencies.get(1)) + '\t' + formatter.format(d.probCollision) + '\t' + formatter.format(examResults.probAtLeastOneCollision) + '\t' + entry.getValue().size() + '\t' + ((stats.analysisParameters.verbosity < 2) ? "" : entry.getValue().stream().limit(20).map(dp -> Stream.concat(dp.topStrandRecords.stream(), dp.bottomStrandRecords.stream())/*.findFirst()*/. map(ExtendedSAMRecord::getFullName).collect(Collectors.joining(", ", "{ ", " }")) ).collect(Collectors.joining(", ", "[ ", " ]"))) + '\n'); } } catch (IOException e) { throw new RuntimeException(e); } if (!d.hasAWtStrand) { continue; } switch(mutant.mutationType) { case SUBSTITUTION: stats.topBottomSubstDisagreementsQ2.accept(location, d); mutant.isTemplateStrand().ifPresent(b -> { if (b) stats.templateStrandSubstQ2.accept(location, d); else stats.codingStrandSubstQ2.accept(location, d);}); break; case DELETION: stats.topBottomDelDisagreementsQ2.accept(location, d); mutant.isTemplateStrand().ifPresent(b -> { if (b) stats.templateStrandDelQ2.accept(location, d); else stats.codingStrandDelQ2.accept(location, d);}); break; case INTRON: stats.topBottomDelDisagreementsQ2.accept(location, d); break; case INSERTION: stats.topBottomInsDisagreementsQ2.accept(location, d); mutant.isTemplateStrand().ifPresent(b -> { if (b) stats.templateStrandInsQ2.accept(location, d); else stats.codingStrandInsQ2.accept(location, d); }); break; default: throw new AssertionFailedException(); } } } private static void prepareReadsToWrite( final @NonNull SequenceLocation location, final @NonNull AnalysisChunk analysisChunk, final boolean collapseFilteredReads, final 
boolean writeBothStrands, final boolean clipPairOverlap, final @NonNull AtomicInteger dn ) { analysisChunk.subAnalyzersParallel.forEach(subAnalyzer -> { //If outputting an alignment populated with fields identifying the duplexes, //fill in the fields here subAnalyzer.analyzedDuplexes.forEach(duplexRead -> { //for (DuplexRead duplexRead: subAnalyzer.analyzedDuplexes) { boolean useAnyStart = duplexRead.maxInsertSize == 0 || duplexRead.maxInsertSize > 10_000; boolean write = location.equals(duplexRead.rightAlignmentEnd) || (useAnyStart && location.equals(duplexRead.leftAlignmentEnd)); if (!write) { return; } final int randomIndexForDuplexName = dn.incrementAndGet(); final int nReads = duplexRead.allDuplexRecords.size(); final Quality minDuplexQuality = duplexRead.minQuality; final Quality maxDuplexQuality = duplexRead.maxQuality; Handle<String> topOrBottom = new Handle<>(); BiConsumer<ExtendedSAMRecord, SAMRecord> queueWrite = (ExtendedSAMRecord e, SAMRecord samRecord) -> { samRecord.setAttribute("DS", nReads); samRecord.setAttribute("DT", duplexRead.topStrandRecords.size()); samRecord.setAttribute("DB", duplexRead.bottomStrandRecords.size()); samRecord.setAttribute("DQ", minDuplexQuality.toInt()); samRecord.setAttribute("DR", maxDuplexQuality.toInt()); samRecord.setDuplicateReadFlag(e.isOpticalDuplicate()); String info = topOrBottom.get() + " Q" + minDuplexQuality.toShortString() + "->" + maxDuplexQuality.toShortString() + " global Qs: " + duplexRead.globalQuality + " P" + duplexRead.getMinMedianPhred() + " D" + mediumLengthFloatFormatter.get().format(duplexRead.referenceDisagreementRate); samRecord.setAttribute("DI", info); samRecord.setAttribute("DN", randomIndexForDuplexName + "--" + System.identityHashCode(duplexRead)); samRecord.setAttribute("VB", new String(e.variableBarcode)); samRecord.setAttribute("VM", new String(e.getMateVariableBarcode())); samRecord.setAttribute("DE", duplexRead.leftAlignmentStart + "-" + duplexRead.leftAlignmentEnd + " 
duplexRead.rightAlignmentStart + '-' + duplexRead.rightAlignmentEnd); if (!duplexRead.issues.isEmpty()) { samRecord.setAttribute("IS", duplexRead.issues.toString()); } else { samRecord.setAttribute("IS", null); } samRecord.setAttribute("AI", subAnalyzer.getAnalyzer().name); subAnalyzer.queueOutputRead(e, samRecord, useAnyStart); }; Consumer<List<ExtendedSAMRecord>> writePair = (List<ExtendedSAMRecord> list) -> { final ExtendedSAMRecord e = list.get(0); final ExtendedSAMRecord mate = e.getMate(); SAMRecord samRecord = e.record; if (mate != null) { SAMRecord mateSamRecord = mate.record; if (clipPairOverlap) { try { samRecord = (SAMRecord) samRecord.clone(); mateSamRecord = (SAMRecord) mateSamRecord.clone(); } catch (CloneNotSupportedException excp) { throw new RuntimeException(excp); } TrimOverlappingReads.clipForNoOverlap(samRecord, mateSamRecord, e, mate); TrimOverlappingReads.removeClippedBases(samRecord); TrimOverlappingReads.removeClippedBases(mateSamRecord); } if (!mateSamRecord.getReadUnmappedFlag()) { queueWrite.accept(mate, mateSamRecord); } } if (!samRecord.getReadUnmappedFlag()) { queueWrite.accept(e, samRecord); } }; if (collapseFilteredReads) { final boolean topPresent = !duplexRead.topStrandRecords.isEmpty(); if (topPresent) { topOrBottom.set("T"); writePair.accept(duplexRead.topStrandRecords); } if ((writeBothStrands || !topPresent) && !duplexRead.bottomStrandRecords.isEmpty()) { topOrBottom.set("B"); writePair.accept(duplexRead.bottomStrandRecords); } if (!duplexRead.topStrandRecords.isEmpty() && !duplexRead.bottomStrandRecords.isEmpty()) { Assert.isFalse(duplexRead.topStrandRecords.get(0).equals(duplexRead.bottomStrandRecords.get(0))); } } else { topOrBottom.set("T"); duplexRead.topStrandRecords.forEach(e-> queueWrite.accept(e, e.record)); topOrBottom.set("B"); duplexRead.bottomStrandRecords.forEach(e-> queueWrite.accept(e, e.record)); } }); }); } private static boolean shouldLog(Level level) { return logger.isEnabled(level); } }
// JSPyIterableObject.java package ed.lang.python; import java.util.*; import org.python.core.*; public class JSPyIterableObject extends JSPyObjectWrapper implements Iterable { public JSPyIterableObject( PyObject p ){ super(p); } public Iterator iterator(){ return new IterWrapper( _p.asIterable().iterator() ); } class IterWrapper implements Iterator { Iterator _iter; IterWrapper( Iterator i ){ _iter = i; } public Object next(){ return Python.toJS( _iter.next() ); } public void remove(){ _iter.remove(); } public boolean hasNext(){ return _iter.hasNext(); } } }
package beaform; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.transaction.SystemException; import javax.transaction.TransactionManager; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; import org.hibernate.jpa.HibernateEntityManagerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A handler for the graph database using JTA. * * @author Steven Post * */ public class GraphDbHandlerForJTA { /** The instance of this singleton */ private static final GraphDbHandlerForJTA INSTANCE = new GraphDbHandlerForJTA(); /** The {@link EntityManagerFactory} */ private final EntityManagerFactory entityManagerFact; /** The global {@link EntityManager} */ private final EntityManager entityManager; /** The {@link TransactionManager} */ private final TransactionManager transactionMgr; /** * Get the instance of this handler. * @return the instance */ public static GraphDbHandlerForJTA getInstance() { return INSTANCE; } private GraphDbHandlerForJTA() { //build the EntityManagerFactory as you would build in in Hibernate ORM this.entityManagerFact = Persistence.createEntityManagerFactory("ogm-jpa-tutorial"); //accessing JBoss's Transaction can be done differently but this one works nicely final SessionFactoryImplementor sessionFactory = (SessionFactoryImplementor) ( (HibernateEntityManagerFactory) this.entityManagerFact ).getSessionFactory(); this.transactionMgr = sessionFactory.getServiceRegistry().getService( JtaPlatform.class ).retrieveTransactionManager(); // Initialize the main entity manager this.entityManager = this.entityManagerFact.createEntityManager(); final ShutDownHook shutdownHook = new ShutDownHook(this.entityManager, this.entityManagerFact); Runtime.getRuntime().addShutdownHook(shutdownHook); } /** * Gets the status of the global {@link TransactionManager}. 
* @return the status of the transaction manager * @throws SystemException If the transaction service fails in an unexpected way. */ public static int getTransactionManagerStatus() throws SystemException { return INSTANCE.transactionMgr.getStatus(); } /** * Gets the global {@link TransactionManager}. * @return the {@link TransactionManager} */ public static TransactionManager getTransactionManager() { return INSTANCE.transactionMgr; } /** * Gets the {@link EntityManagerFactory}. * @return the factory */ public static EntityManagerFactory getEntityManagerFactory() { return INSTANCE.entityManagerFact; } /** * This method creates a new {@link EntityManager} * @return the new manager */ private EntityManager createNewEntityManager() { return this.entityManagerFact.createEntityManager(); } /** * This method creates a new {@link EntityManager} * @return the new manager */ public static EntityManager getNewEntityManager() { return INSTANCE.createNewEntityManager(); } /** * Getter for the entity manager. * * @return the entity manager */ public EntityManager getEntityManager() { return this.entityManager; } /** * Try to close the entity manager. * This method will flush the manager first. * @param entityManager the manager to close. */ public static void tryCloseEntityManager(final EntityManager entityManager) { entityManager.flush(); entityManager.close(); } /** * This class is a shutdown hook to make sure the embedded DB is stopped. * * @author Steven Post * */ private final static class ShutDownHook extends Thread { /** A logger */ private static final Logger LOG = LoggerFactory.getLogger(ShutDownHook.class); /** The entity manager factory */ private final EntityManagerFactory entityManagerFact; /** The entity manager */ private final EntityManager entityManager; /** * Constructor. * @param entityManager The entity manager. * @param entityManagerFact The entity manager factory. 
*/ public ShutDownHook(final EntityManager entityManager, final EntityManagerFactory entityManagerFact) { super(); this.entityManager = entityManager; this.entityManagerFact = entityManagerFact; } /** * Invoked when the hook executes. */ @Override public void run() { LOG.info("Start DB shutdown"); this.entityManager.close(); this.entityManagerFact.close(); LOG.info("DB shutdown complete"); } } }
package test; import static org.junit.Assert.*; import java.util.Vector; import misc.PolyFunction; import org.junit.Test; import finders.FinderInterface; import finders.LinearFinder; import finders.NewtonFinder; import finders.QuadraticFinder; public class FinderTest { @Test public void testLinearFinder() { FinderInterface finder = new LinearFinder(); // probeprfung 2a // -2/3x + 5 // result = 7.5 PolyFunction f1 = new PolyFunction(). setCoeff(-2.0/3.0, 1). setCoeff(5.0, 0); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertEquals((Double) 7.5, results.firstElement()); } @Test public void testQuadraticFinder1() { FinderInterface finder = new QuadraticFinder(); // probeprfung 2b // 1/2x^2 - 2x - 6 // results = -2, 6 PolyFunction f1 = new PolyFunction(). setCoeff(0.5, 2). setCoeff(-2.0, 1). setCoeff(-6.0, 0); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(-2.0)); assertTrue(results.contains(6.0)); } @Test public void testQuadraticFinder2() { FinderInterface finder = new QuadraticFinder(); // prfung 1a // -2x^2 + 4x + 6 // results = -1, 3 PolyFunction f1 = new PolyFunction(). setCoeff(-2.0, 2). setCoeff(4.0, 1). setCoeff(6.0, 0); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(-1.0)); assertTrue(results.contains(3.0)); } @Test public void testNewtonFinder1() { FinderInterface finder = new NewtonFinder(); // probeprfung 2e // x^3 - 3x - 2 // results = -1, 2 PolyFunction f1 = new PolyFunction(). setCoeff(1.0, 3). setCoeff(-3.0, 1). setCoeff(-2.0, 0); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(-1.0)); assertTrue(results.contains(2.0)); } @Test public void testNewtonFinder2() { FinderInterface finder = new NewtonFinder(); // result = 0 PolyFunction f1 = new PolyFunction(). 
setCoeff(1.0, 3); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertEquals((Double) 0.0, results.firstElement()); } @Test public void testNewtonFinder3() { FinderInterface finder = new NewtonFinder(); // ableitung hat keine nullstelle // -x^3 - 3x // result = 0 PolyFunction f1 = new PolyFunction(). setCoeff(-1.0, 3). setCoeff(-3.0, 1); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertEquals((Double) 0.0, results.firstElement()); } @Test public void testNewtonFinder4() { FinderInterface finder = new NewtonFinder(); // result = 0 PolyFunction f1 = new PolyFunction(). setCoeff(-1.0, 3); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertEquals((Double) 0.0, results.firstElement()); } @Test public void testNewtonFinder5() { FinderInterface finder = new NewtonFinder(); // probeprfung 2d // -1/3x^4 + 4/3x^3 // results = 0, 4 PolyFunction f1 = new PolyFunction(). setCoeff(-1.0/3.0, 4). setCoeff(4.0/3.0, 3); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(0.0)); assertTrue(results.contains(4.0)); } @Test public void testNewtonFinder6() { FinderInterface finder = new NewtonFinder(); // probeprfung 7 // 2/3x^3 - 1/2x^2 - 36x + 6 // results = -7.0690, 0.1664, 7.6527 PolyFunction f1 = new PolyFunction(). setCoeff(2.0/3.0, 3). setCoeff(-0.5, 2). setCoeff(-36.0, 1). setCoeff(6.0, 0); Vector<Double> results = finder.find(f1); assertEquals(3, results.size()); assertTrue(results.contains(-7.069)); assertTrue(results.contains(0.166)); assertTrue(results.contains(7.653)); } @Test public void testNewtonFinder7() { FinderInterface finder = new NewtonFinder(); // probeprfung 2f (first part) // -2.0x^3 + 6x // results = -1.7320, 0, 1.7320 PolyFunction f1 = new PolyFunction(). setCoeff(-2.0, 3). 
setCoeff(6.0, 1); Vector<Double> results = finder.find(f1); assertEquals(3, results.size()); assertTrue(results.contains(-1.732)); assertTrue(results.contains(0.0)); assertTrue(results.contains(1.732)); } @Test public void testNewtonFinder8() { FinderInterface finder = new NewtonFinder(); // tobias special case // 1.5x^4 - x^3 - x^2 + 0.1 // results = 0.2930, 1.1880 PolyFunction f1 = new PolyFunction(). setCoeff(1.5, 4). setCoeff(-1.0, 3). setCoeff(-1.0, 2). setCoeff(0.1, 0); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(0.293)); assertTrue(results.contains(1.188)); } @Test public void testNewtonFinder9() { FinderInterface finder = new NewtonFinder(); // prfung 1b // 0.5x^4 - x^3 // results = 0, 2 PolyFunction f1 = new PolyFunction(). setCoeff(0.5, 4). setCoeff(-1.0, 3); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(0.0)); assertTrue(results.contains(2.0)); } @Test public void testNewtonFinder10() { FinderInterface finder = new NewtonFinder(); // prfung 1c // -2x^3 + 3x^2 // results = 0, 1.5 PolyFunction f1 = new PolyFunction(). setCoeff(-2.0, 3). setCoeff(3.0, 2); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(0.0)); assertTrue(results.contains(1.5)); } @Test public void testNewtonFinder11() { FinderInterface finder = new NewtonFinder(); // prfung 6 // 1/3x^3 + x^2 - 24x + 14 // results = -10.3492, 0.6014, 6.7477 PolyFunction f1 = new PolyFunction(). setCoeff(1.0/3.0, 3). setCoeff(1.0, 2). setCoeff(-24.0, 1). setCoeff(14.0, 0); Vector<Double> results = finder.find(f1); assertEquals(3, results.size()); assertTrue(results.contains(-10.349)); assertTrue(results.contains(0.601)); assertTrue(results.contains(6.748)); } @Test public void testNewtonFinder12() { FinderInterface finder = new NewtonFinder(); // -7/6x^7 + 1/3x^4 + 4/3x^3 // results = -0.96499, 0, 1.09862 PolyFunction f1 = new PolyFunction(). 
setCoeff(-7.0/6.0, 7). setCoeff(1.0/3.0, 4). setCoeff(4.0/3.0, 3); Vector<Double> results = finder.find(f1); assertEquals(3, results.size()); assertTrue(results.contains(-0.965)); assertTrue(results.contains(0.0)); assertTrue(results.contains(1.099)); } @Test public void testNewtonFinder13() { FinderInterface finder = new NewtonFinder(); // teil von testNewtonFinder12 // ableitung hat nur eine nullstelle // -245x^4 + 8x + 8 // results = -0.377575, 0.467902 PolyFunction f1 = new PolyFunction(). setCoeff(-245.0, 4). setCoeff(8.0, 1). setCoeff(8.0, 0); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(-0.378)); assertTrue(results.contains(0.468)); } @Test public void testNewtonFinder14() { FinderInterface finder = new NewtonFinder(); // keine nullstelle // 53x^4 + 9x^3 + 38x^2 + 4x + 20 // results = none PolyFunction f1 = new PolyFunction(). setCoeff(53.0, 4). setCoeff(9.0, 3). setCoeff(38.0, 2). setCoeff(4.0, 1). setCoeff(20.0, 0); Vector<Double> results = finder.find(f1); assertEquals(0, results.size()); } @Test public void testNewtonFinder15() { FinderInterface finder = new NewtonFinder(); // -6x^12 + 4x^9 - 9x^6 + 2 // results = -0.745162, 0.78614 PolyFunction f1 = new PolyFunction(). setCoeff(-6.0, 12). setCoeff(4.0, 9). setCoeff(-9.0, 6). setCoeff(2.0, 0); Vector<Double> results = finder.find(f1); assertEquals(2, results.size()); assertTrue(results.contains(-0.745)); assertTrue(results.contains(0.786)); } @Test public void testNewtonFinder16() { FinderInterface finder = new NewtonFinder(); // results = 0 PolyFunction f1 = new PolyFunction(). setCoeff(1.0, 4); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertTrue(results.contains(0.0)); } @Test public void testNewtonFinder17() { FinderInterface finder = new NewtonFinder(); // results = 0 PolyFunction f1 = new PolyFunction(). 
setCoeff(1.0, 5); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertTrue(results.contains(0.0)); } @Test public void testNewtonFinder18() { FinderInterface finder = new NewtonFinder(); // results = 0 PolyFunction f1 = new PolyFunction(). setCoeff(1.0, 6); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertTrue(results.contains(0.0)); } @Test public void testNewtonFinder19() { FinderInterface finder = new NewtonFinder(); // results = 0 PolyFunction f1 = new PolyFunction(). setCoeff(1.0, 7); Vector<Double> results = finder.find(f1); assertEquals(1, results.size()); assertTrue(results.contains(0.0)); } }
package ch.bind.philib.io; import ch.bind.philib.validation.SimpleValidation; public class RingBuffer implements DoubleSidedBuffer { public static final int DEFAULT_CAPACITY = 4096; private byte[] ringBuf; // total capacity private int ringCapacity; // where the data starts private int ringOffset; // available data private int ringSize; public RingBuffer() { this(DEFAULT_CAPACITY); } public RingBuffer(int capacity) { SimpleValidation.notNegative(capacity, "capacity"); _init(capacity); } @Override public int available() { return ringSize; } @Override public void clear() { ringOffset = 0; ringSize = 0; } @Override public void read(byte[] data) { SimpleValidation.notNull(data, "data-buffer"); read(data, 0, data.length); } @Override public void read(byte[] data, int off, int len) { SimpleValidation.notNull(data, "data-buffer"); SimpleValidation.notNegative(off, "offset"); SimpleValidation.notNegative(len, "offset"); _bufferSpaceCheck(data, off, len); if (len == 0) { return; } _readLenCheck(len); _read(data, 0, len); _consumed(len); } @Override public void readBack(byte[] data) { SimpleValidation.notNull(data, "data-buffer"); readBack(data, 0, data.length); } @Override public void readBack(byte[] data, int off, int len) { SimpleValidation.notNull(data, "data-buffer"); SimpleValidation.notNegative(off, "offset"); SimpleValidation.notNegative(len, "offset"); _bufferSpaceCheck(data, off, len); _readLenCheck(len); _readBack(data, 0, len); _consumedBack(len); } @Override public void write(byte[] data) { SimpleValidation.notNull(data, "data-buffer"); write(data, 0, data.length); } @Override public void write(byte[] data, int off, int len) { SimpleValidation.notNull(data, "data-buffer"); SimpleValidation.notNegative(off, "offset"); SimpleValidation.notNegative(len, "offset"); _bufferSpaceCheck(data, off, len); int newSize = ringSize + len; _ensureBufferSize(newSize); _write(data, off, len); ringSize = newSize; } @Override public void writeFront(byte[] data) { 
SimpleValidation.notNull(data, "data-buffer"); writeFront(data, 0, data.length); } @Override public void writeFront(byte[] data, int off, int len) { SimpleValidation.notNull(data, "data-buffer"); SimpleValidation.notNegative(off, "offset"); SimpleValidation.notNegative(len, "offset"); _bufferSpaceCheck(data, off, len); int newSize = ringSize + len; _ensureBufferSize(newSize); _writeFront(data, off, len); ringSize = newSize; ringOffset = _offsetMinus(len); } private void _init(int capacity) { this.ringCapacity = capacity; this.ringBuf = new byte[capacity]; } private void _bufferSpaceCheck(byte[] data, int off, int len) { // FIXME // off + len > data.length could overflow and therefore fail if (off + len > data.length) { throw new IllegalArgumentException("not enough space in buffer"); } } private void _ensureBufferSize(int requiredSpace) { if (requiredSpace <= ringCapacity) { return; } int newCap = ringCapacity * 2; while (newCap < requiredSpace) { newCap *= 2; } byte[] newBuf = new byte[newCap]; // read all data into the beginning of the new buffer _read(newBuf, 0, ringSize); this.ringBuf = newBuf; this.ringCapacity = newCap; this.ringOffset = 0; } private void _read(byte[] buf, int off, int len) { int availToEnd = ringCapacity - ringOffset; if (availToEnd >= len) { // all data is available from one read ac(ringBuf, ringOffset, buf, off, len); } else { // read available space from the offset to the end of the buffer // then read the rest of the required data from the beginning int rem = len - availToEnd; ac(ringBuf, ringOffset, buf, off, availToEnd); ac(ringBuf, 0, buf, off + availToEnd, rem); } } private void _readBack(byte[] buf, int off, int len) { int firstReadOffset = _offsetPlus(ringSize - len); int availToEnd = ringCapacity - firstReadOffset; int numReadOne = Math.min(availToEnd, len); int numReadTwo = len - numReadOne; ac(ringBuf, firstReadOffset, buf, off, numReadOne); if (numReadTwo > 0) { ac(ringBuf, 0, buf, off + numReadOne, numReadTwo); } } private 
void _write(byte[] data, int off, int len) { int writePosOne = _offsetPlus(ringSize); int availBack = ringCapacity - writePosOne; int numWriteOne = Math.min(availBack, len); int numWriteTwo = len - numWriteOne; ac(data, off, ringBuf, writePosOne, numWriteOne); if (numWriteTwo > 0) { ac(data, off + numWriteOne, ringBuf, 0, numWriteTwo); } } private void _writeFront(byte[] data, int off, int len) { int writePosOne = _offsetMinus(len); int availBack = ringCapacity - writePosOne; int numWriteOne = Math.min(availBack, len); ac(data, off, ringBuf, writePosOne, numWriteOne); int numWriteTwo = len - numWriteOne; if (numWriteTwo > 0) { ac(data, off + numWriteOne, ringBuf, 0, numWriteTwo); } } private void _consumed(int len) { ringOffset = _offsetPlus(len); ringSize -= len; } private void _consumedBack(int len) { ringSize -= len; } private void _readLenCheck(int len) { if (this.ringSize < len) { throw new IllegalArgumentException(); } } private int _offsetPlus(int shift) { int offset = ringOffset + shift; offset %= ringCapacity; return offset; } private int _offsetMinus(int shift) { int offset = ringOffset - shift; if (offset < 0) { offset += ringCapacity; } return offset; } // shorten all those arraycopy calls private static final void ac(byte[] src, int srcPos, byte[] dst, int dstPos, int length) { System.arraycopy(src, srcPos, dst, dstPos, length); } }
package clientapi.lua;

import clientapi.ClientAPI;
import org.luaj.vm2.LuaFunction;
import org.luaj.vm2.LuaValue;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Manages the registration, lookup, and detachment of Lua event hooks.
 *
 * @author Brady
 * @since 11/8/2017 1:53 PM
 */
public final class LuaHookManager {

    /**
     * Per-script mapping from hook event name to the list of functions
     * registered against that event.
     */
    private final Map<LuaScript, Map<String, List<LuaFunction>>> hooks = new HashMap<>();

    /**
     * Child event bus that relays events posted on {@code ClientAPI#EVENT_BUS}
     * to the registered Lua hook functions.
     */
    private final LuaEventBus eventBus = new LuaEventBus(this);

    /** The script most recently handed off for evaluation. */
    private LuaScript currentScript;

    LuaHookManager() {
        // Begin relaying events immediately.
        allow(true);
    }

    /**
     * Registers a hook function for the given event on the script currently
     * being evaluated.
     *
     * @param event    the hook event target
     * @param function the lua hook function
     */
    public final void create(String event, LuaFunction function) {
        Map<String, List<LuaFunction>> scriptHooks = getHooks(currentScript);
        List<LuaFunction> functions = scriptHooks.computeIfAbsent(event, name -> new ArrayList<>());
        functions.add(function);
    }

    /**
     * Detaches a {@code LuaScript's} active hooks.
     *
     * @param script the script
     * @return whether any hooks were present and removed
     */
    public final boolean detach(LuaScript script) {
        return hooks.remove(script) != null;
    }

    /**
     * Returns (creating on demand) the hook map for the specified script.
     *
     * @param script the script
     * @return the event-name to function-list mapping for {@code script}
     */
    public final Map<String, List<LuaFunction>> getHooks(LuaScript script) {
        return hooks.computeIfAbsent(script, s -> new HashMap<>());
    }

    /**
     * Returns all of the active hooks, keyed by script.
     *
     * @return all of the active hooks
     */
    public final Map<LuaScript, Map<String, List<LuaFunction>>> getHooks() {
        return hooks;
    }

    /**
     * Toggles whether events from {@code ClientAPI#EVENT_BUS} are relayed to
     * the Lua event hooks.
     *
     * @param canPassEvents whether or not events can be passed
     */
    public final void allow(boolean canPassEvents) {
        if (canPassEvents) {
            ClientAPI.EVENT_BUS.attach(eventBus);
        } else {
            ClientAPI.EVENT_BUS.detach(eventBus);
        }
    }

    /**
     * Records the script currently being evaluated. Internal use only, so that
     * {@link #create} knows which script is registering hooks.
     *
     * @param currentScript the current script being evaluated
     */
    void setCurrentScript(LuaScript currentScript) {
        this.currentScript = currentScript;
    }
}
package com.blobcity.db;

import com.blobcity.db.search.SearchParam;
import com.blobcity.db.bquery.QueryExecuter;
import com.blobcity.db.classannotations.Entity;
import com.blobcity.db.constants.Credentials;
import com.blobcity.db.fieldannotations.Primary;
import com.blobcity.db.constants.QueryType;
import com.blobcity.db.exceptions.DbOperationException;
import com.blobcity.db.exceptions.InternalAdapterException;
import com.blobcity.db.exceptions.InternalDbException;
import com.blobcity.db.search.Query;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * This class provides the connection and query execution framework for performing operations on the BlobCity data store. This class must be extended by any
 * Model that represents a BlobCity Entity.
 *
 * @author Sanket Sarang
 * @author Karishma
 * @version 1.0
 * @since 1.0
 */
public abstract class CloudStorage {

    /** Name of the table this entity maps to; resolved once in the constructor. */
    private String table = null;

    /**
     * Resolves the table name for this entity — from the {@link Entity} annotation when present
     * and non-empty, otherwise the simple class name — and registers the class with the
     * {@link TableStore}.
     */
    public CloudStorage() {
        // Delegates to getTableName(Class) so the two name resolutions can never diverge
        // (the javadoc of getTableName requires them to be kept in sync).
        table = getTableName(this.getClass());
        TableStore.getInstance().registerClass(table, this.getClass());
    }

    /**
     * Creates an unpopulated instance of the given entity class.
     *
     * @param <T> entity type
     * @param clazz entity class to instantiate
     * @return a new instance of {@code clazz}
     * @throws InternalAdapterException if the class cannot be reflectively instantiated
     */
    public static <T extends CloudStorage> T newInstance(Class<T> clazz) {
        try {
            return clazz.newInstance();
        } catch (InstantiationException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        } catch (IllegalAccessException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        }
    }

    /**
     * Creates an instance of the given entity class with its primary key pre-set.
     *
     * @param <T> entity type
     * @param clazz entity class to instantiate
     * @param pk primary key value to assign
     * @return a new instance of {@code clazz} with {@code pk} set
     * @throws InternalAdapterException if the class cannot be reflectively instantiated
     */
    public static <T extends CloudStorage> T newInstance(Class<T> clazz, Object pk) {
        try {
            T obj = clazz.newInstance();
            obj.setPk(pk);
            return obj;
        } catch (InstantiationException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        } catch (IllegalAccessException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        }
    }

    /**
     * Creates an instance of the given entity class and loads its data from the store.
     *
     * @param <T> entity type
     * @param clazz entity class to instantiate
     * @param pk primary key of the record to load
     * @return the loaded instance, or {@code null} if no record exists for {@code pk}
     * @throws InternalAdapterException if the class cannot be reflectively instantiated
     */
    public static <T extends CloudStorage> T newLoadedInstance(Class<T> clazz, Object pk) {
        try {
            T obj = clazz.newInstance();
            obj.setPk(pk);
            if (obj.load()) {
                return obj;
            }
            return null;
        } catch (InstantiationException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        } catch (IllegalAccessException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        }
    }

    /**
     * Fetches all primary keys of the table mapped by {@code clazz}.
     *
     * @param <T> entity type
     * @param clazz entity class whose keys are fetched
     * @return the primary keys as {@link Object}s
     */
    public static <T extends CloudStorage> List<Object> selectAll(Class<T> clazz) {
        return selectAll(clazz, Object.class);
    }

    /**
     * Fetches all primary keys of the table mapped by {@code clazz}, transformed to the
     * requested key type.
     *
     * @param <T> entity type
     * @param <P> primary key type
     * @param clazz entity class whose keys are fetched
     * @param returnTypeClazz class to which each key is transformed
     * @return the primary keys as instances of {@code P}
     * @throws DbOperationException if the database reports an error code
     * @throws InternalDbException if the API response is not valid JSON
     */
    public static <T extends CloudStorage, P extends Object> List<P> selectAll(final Class<T> clazz, final Class<P> returnTypeClazz) {
        JSONObject responseJson = postStaticRequest(clazz, QueryType.SELECT_ALL);
        JSONArray jsonArray;
        List<P> list;
        try {
            if ("1".equals(responseJson.getString("ack"))) {
                jsonArray = responseJson.getJSONArray("keys");
                list = new ArrayList<P>();
                for (int i = 0; i < jsonArray.length(); i++) {
                    list.add(dataTypeTransform((P) jsonArray.getString(i), returnTypeClazz));
                }
                return list;
            }
            throw new DbOperationException(responseJson.getString("code"));
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Checks whether a record with the given primary key exists.
     *
     * @param <T> entity type
     * @param clazz entity class whose table is checked
     * @param key primary key to test
     * @return {@code true} if the record exists
     * @throws DbOperationException if the database reports an error code
     * @throws InternalDbException if the API response is not valid JSON
     */
    public static <T extends CloudStorage> boolean contains(Class<T> clazz, Object key) {
        JSONObject responseJson = postStaticRequest(clazz, QueryType.CONTAINS, key);
        try {
            if ("1".equals(responseJson.getString("ack"))) {
                return responseJson.getBoolean("contains");
            }
            throw new DbOperationException(responseJson.getString("code"));
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Removes the record with the given primary key.
     *
     * @param <T> entity type
     * @param clazz entity class whose record is removed
     * @param pk primary key of the record to remove
     * @throws DbOperationException if the database reports an error code
     * @throws InternalDbException if the API response is not valid JSON
     */
    public static <T extends CloudStorage> void remove(Class<T> clazz, Object pk) {
        JSONObject responseJson = postStaticRequest(clazz, QueryType.REMOVE, pk);
        try {
            if (responseJson.getString("ack").equals("0")) {
                throw new DbOperationException(responseJson.getString("code"));
            }
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Executes a search query and materializes each result row into an entity instance.
     *
     * @param <T> entity type
     * @param query query to execute; must name at least one table
     * @return the matching entity instances
     * @throws InternalAdapterException if the query names no table or reflection fails
     * @throws DbOperationException if the database reports an error code
     * @throws InternalDbException if the API request/response JSON is invalid
     */
    public static <T extends CloudStorage> List<T> search(Query<T> query) {
        // BUGFIX: this guard used '&&', which NPE'd on a null table list and never
        // fired for an empty one. Either condition must reject the query.
        if (query.getFromTables() == null || query.getFromTables().isEmpty()) {
            throw new InternalAdapterException("No table name set. Table name is a mandatory field for queries.");
        }
        final Class<T> clazz = query.getFromTables().get(0);
        final Map<String, Object> requestMap = new HashMap<String, Object>();
        requestMap.put("app", Credentials.getInstance().getAppId());
        requestMap.put("key", Credentials.getInstance().getAppKey());
        requestMap.put("q", QueryType.SEARCH.getQueryCode());
        requestMap.put("p", query.asJson());
        final String responseString = new QueryExecuter().executeQuery(new JSONObject(requestMap));
        final JSONObject responseJson;
        try {
            responseJson = new JSONObject(responseString);
        } catch (JSONException ex) {
            throw new InternalDbException("Error in processing request/response JSON", ex);
        }
        try {
            if ("1".equals(responseJson.getString("ack"))) {
                final JSONArray resultJsonArray = responseJson.getJSONArray("p");
                final int resultCount = resultJsonArray.length();
                final List<T> responseList = new ArrayList<T>();
                final String tableName = CloudStorage.getTableName(clazz);
                TableStore.getInstance().registerClass(tableName, clazz);
                final Map<String, Field> structureMap = TableStore.getInstance().getStructure(tableName);
                for (int i = 0; i < resultCount; i++) {
                    final T instance = CloudStorage.newInstance(clazz);
                    final JSONObject instanceData = resultJsonArray.getJSONObject(i);
                    final Iterator<String> columnNameIterator = instanceData.keys();
                    while (columnNameIterator.hasNext()) {
                        final String columnName = columnNameIterator.next();
                        final Field field = structureMap.get(columnName);
                        if (field == null) {
                            // Robustness: a column unknown to the local structure map
                            // previously caused an NPE; skip it with a warning instead.
                            Logger.getLogger(CloudStorage.class.getName()).log(Level.WARNING,
                                    "Ignoring unknown column \"{0}\" returned for class {1}",
                                    new Object[]{columnName, clazz});
                            continue;
                        }
                        final boolean oldAccessibilityValue = field.isAccessible();
                        field.setAccessible(true);
                        try {
                            field.set(instance, getCastedValue(field, instanceData.get(columnName), clazz));
                        } catch (JSONException ex) {
                            throw new InternalDbException("Error in processing JSON. Class: " + clazz + " Request: " + instanceData.toString(), ex);
                        } catch (IllegalArgumentException ex) {
                            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
                        } catch (IllegalAccessException ex) {
                            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
                        } finally {
                            // restore original accessibility regardless of outcome
                            field.setAccessible(oldAccessibilityValue);
                        }
                    }
                    responseList.add(instance);
                }
                return responseList;
            }
            throw new DbOperationException(responseJson.getString("code"));
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Allows quick search queries on a single column. This method internally uses {@link #search(com.blobcity.db.search.Query) }
     *
     * @see #search(com.blobcity.db.search.Query)
     * @param <T> Any class reference which extends {@link CloudStorage}
     * @param clazz class reference who's data is to be searched
     * @param columnName column to be searched
     * @param values values to be used to filter data in column
     * @return {@link List} of {@code T} that matches {@code searchParams}
     */
    public static <T extends CloudStorage> List<T> select(final Class<T> clazz, final String columnName, final Object... values) {
        return search(Query.table(clazz).where(SearchParam.create(columnName).in(values)));
    }

    /**
     * Not implemented.
     *
     * @param <T> entity type
     * @param clazz entity class to filter
     * @param filterName name of the filter to apply
     * @return never returns normally
     * @throws UnsupportedOperationException always
     */
    public static <T extends CloudStorage> List<T> filter(Class<T> clazz, String filterName) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /**
     * Statically provides the table name for any instance/child of {@link CloudStorage} that is internally used by the adapter for querying. The constructor
     * delegates to this method, so the table-name resolution logic lives in exactly one place. This method can also be used for logging purposes where the
     * table name for a class is required.
     *
     * @param <T> Any class reference which extends {@link CloudStorage}
     * @param clazz class reference who's table name is required
     * @return Name of the table
     */
    public static <T extends CloudStorage> String getTableName(final Class<T> clazz) {
        final Entity entity = (Entity) clazz.getAnnotation(Entity.class);
        return entity != null && entity.table() != null && !"".equals(entity.table()) ? entity.table() : clazz.getSimpleName();
    }

    /**
     * Sets the primary key field of this instance via reflection.
     *
     * @param pk primary key value to assign
     * @throws InternalAdapterException if the field cannot be written
     */
    protected void setPk(Object pk) {
        Field primaryKeyField = TableStore.getInstance().getPkField(table);
        try {
            primaryKeyField.setAccessible(true);
            primaryKeyField.set(this, pk);
            primaryKeyField.setAccessible(false);
        } catch (IllegalArgumentException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        } catch (IllegalAccessException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        }
    }

    /**
     * Loads this instance's record from the store using its current primary key.
     *
     * @return {@code true} if the record was found and loaded, {@code false} if it does not exist
     * @throws DbOperationException if the database reports an error other than "not found"
     * @throws InternalDbException if the API response is not valid JSON
     */
    public boolean load() {
        JSONObject responseJson;
        JSONObject payloadJson;
        responseJson = postRequest(QueryType.LOAD);
        try {
            /* If ack:0 then check for error code and report accordingly */
            if ("0".equals(responseJson.getString("ack"))) {
                // DB200 means the record simply does not exist
                if ("DB200".equals(responseJson.getString("code"))) {
                    return false;
                } else {
                    reportIfError(responseJson);
                }
            }
            payloadJson = responseJson.getJSONObject("p");
            fromJson(payloadJson);
            return true;
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Persists the current state of this instance to the store (insert-or-update semantics).
     *
     * @throws DbOperationException if the database reports an error code
     */
    public void save() {
        JSONObject responseJson = postRequest(QueryType.SAVE);
        reportIfError(responseJson);
    }

    /**
     * Inserts this instance as a new record.
     *
     * @return {@code true} if inserted, {@code false} if a record with the same primary key
     *         already exists (code DB201)
     * @throws DbOperationException if the database reports any other error code
     * @throws InternalAdapterException on an unrecognized acknowledgement code
     */
    public boolean insert() {
        JSONObject responseJson = postRequest(QueryType.INSERT);
        try {
            if ("1".equals(responseJson.getString("ack"))) {
                final JSONObject payloadJson = responseJson.getJSONObject("p");
                fromJson(payloadJson);
                return true;
            } else if ("0".equals(responseJson.getString("ack"))) {
                // DB201 means the primary key is already present
                if ("DB201".equals(responseJson.getString("code"))) {
                    return false;
                }
                /*
                 * considering conditions before this and the code in {@link #reportIfError(JSONObject)},
                 * this call will always result in an exception.
                 */
                reportIfError(responseJson);
            }
            throw new InternalAdapterException("Unknown acknowledgement code from the database. Expected: [0, 1]. Actual: " + responseJson.getString("ack"));
        } catch (Exception ex) {
            // reportIfError rethrows the database-reported error when one is present
            reportIfError(responseJson);
            throw new InternalAdapterException("Exception occurred in the adapter.", ex);
        }
    }

    /**
     * Removes this instance's record from the store. A missing record (code DB200)
     * is treated as success.
     *
     * @throws DbOperationException if the database reports any other error code
     * @throws InternalDbException if the API response is not valid JSON
     */
    public void remove() {
        final JSONObject responseJson = postRequest(QueryType.REMOVE);
        try {
            /* If ack:0 then check for error code and report accordingly */
            if ("0".equals(responseJson.getString("ack")) && !responseJson.getString("code").equals("DB200")) {
                reportIfError(responseJson);
            }
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Gets a JSON representation of the object. The column names are same as those loaded in {@link TableStore}
     *
     * @return {@link JSONObject} representing the entity class in its current state
     */
    public JSONObject asJson() {
        try {
            return toJson();
        } catch (IllegalArgumentException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        } catch (IllegalAccessException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        }
    }

    /**
     * Instantiates current object with data from the provided {@link JSONObject}.
     *
     * Every column mentioned in the {@link CloudStorage} instance (as maintained by {@link TableStore}) will be loaded with data. If any of these column name
     * IDs do not exist in the provided {@link JSONObject}, an {@link InternalDbException} will be thrown. If there are any issues whilst reflecting the data
     * into the instance, an {@link InternalAdapterException} will be thrown.
     *
     * If any data already exists the calling object in any field mapped as a column, the data will be overwritten and lost.
     *
     * @param jsonObject input {@link JSONObject} from which the data for the current instance are to be loaded.
     */
    private void fromJson(final JSONObject jsonObject) {
        final Map<String, Field> structureMap = TableStore.getInstance().getStructure(table);
        for (final String columnName : structureMap.keySet()) {
            final Field field = structureMap.get(columnName);
            field.setAccessible(true);
            try {
                setFieldValue(field, jsonObject.get(columnName));
            } catch (JSONException ex) {
                throw new InternalDbException("Error in processing JSON. Class: " + this.getClass() + " Request: " + jsonObject.toString(), ex);
            } catch (IllegalArgumentException ex) {
                throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
            } catch (IllegalAccessException ex) {
                throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
            }
        }
    }

    /**
     * Builds and executes an instance-scoped request (LOAD/REMOVE carry the primary key,
     * INSERT/SAVE carry the full entity payload).
     *
     * @param queryType type of query to execute
     * @return the parsed JSON response
     * @throws InternalDbException if the request/response JSON is invalid or the query type is unknown
     * @throws InternalAdapterException if reflection on the entity fails
     */
    private JSONObject postRequest(QueryType queryType) {
        JSONObject requestJson;
        JSONObject responseJson;
        try {
            requestJson = new JSONObject();
            requestJson.put("app", Credentials.getInstance().getAppId());
            requestJson.put("key", Credentials.getInstance().getAppKey());
            requestJson.put("t", table);
            requestJson.put("q", queryType.getQueryCode());
            switch (queryType) {
                case LOAD:
                case REMOVE:
                    requestJson.put("pk", getPrimaryKeyValue());
                    break;
                case INSERT:
                case SAVE:
                    requestJson.put("p", toJson());
                    break;
                default:
                    throw new InternalDbException("Attempting to execute an unknown or unidentified query");
            }
            final String responseString = new QueryExecuter().executeQuery(requestJson);
            responseJson = new JSONObject(responseString);
            return responseJson;
        } catch (JSONException ex) {
            throw new InternalDbException("Error in processing request/response JSON", ex);
        } catch (IllegalArgumentException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        } catch (IllegalAccessException ex) {
            throw new InternalAdapterException("An error has occurred in the adapter. Check stack trace for more details.", ex);
        }
    }

    /**
     * Builds and executes a table-scoped request that carries no primary key.
     *
     * @param <T> entity type
     * @param clazz entity class the request targets
     * @param queryType type of query to execute
     * @return the parsed JSON response
     * @throws InternalDbException if the request/response JSON is invalid
     */
    private static <T extends CloudStorage> JSONObject postStaticRequest(Class<T> clazz, QueryType queryType) {
        JSONObject requestJson;
        JSONObject responseJson;
        Entity entity = (Entity) clazz.getAnnotation(Entity.class);
        requestJson = new JSONObject();
        try {
            requestJson.put("app", Credentials.getInstance().getAppId());
            requestJson.put("key", Credentials.getInstance().getAppKey());
            final String tableName = entity != null && entity.table() != null && !"".equals(entity.table()) ? entity.table() : clazz.getSimpleName();
            requestJson.put("t", tableName);
            requestJson.put("q", queryType.getQueryCode());
            final String responseString = new QueryExecuter().executeQuery(requestJson);
            responseJson = new JSONObject(responseString);
            return responseJson;
        } catch (JSONException ex) {
            throw new InternalDbException("Error in processing request/response JSON", ex);
        }
    }

    /**
     * Builds and executes a table-scoped request that targets a single primary key.
     *
     * @param <T> entity type
     * @param clazz entity class the request targets
     * @param queryType type of query to execute
     * @param pk primary key of the targeted record
     * @return the parsed JSON response
     * @throws InternalDbException if the request/response JSON is invalid
     */
    private static <T extends CloudStorage> JSONObject postStaticRequest(Class<T> clazz, QueryType queryType, Object pk) {
        JSONObject requestJson;
        JSONObject responseJson;
        final Entity entity = (Entity) clazz.getAnnotation(Entity.class);
        requestJson = new JSONObject();
        try {
            requestJson.put("app", Credentials.getInstance().getAppId());
            requestJson.put("key", Credentials.getInstance().getAppKey());
            final String tableName = entity != null && entity.table() != null && !"".equals(entity.table()) ? entity.table() : clazz.getSimpleName();
            requestJson.put("t", tableName);
            requestJson.put("q", queryType.getQueryCode());
            requestJson.put("pk", pk);
            final String responseString = new QueryExecuter().executeQuery(requestJson);
            responseJson = new JSONObject(responseString);
            return responseJson;
        } catch (JSONException ex) {
            throw new InternalDbException("Error in processing request/response JSON", ex);
        }
    }

    /**
     * Reflectively collects every mapped column of this instance into a {@link JSONObject}.
     *
     * @return JSON form of this entity
     * @throws IllegalArgumentException if a field cannot be read
     * @throws IllegalAccessException if a field cannot be accessed
     */
    private JSONObject toJson() throws IllegalArgumentException, IllegalAccessException {
        final Map<String, Field> structureMap = TableStore.getInstance().getStructure(table);
        final Map<String, Object> dataMap = new HashMap<String, Object>();
        for (String columnName : structureMap.keySet()) {
            final Field field = structureMap.get(columnName);
            field.setAccessible(true);
            dataMap.put(columnName, field.get(this));
        }
        return new JSONObject(dataMap);
    }

    /**
     * Throws a {@link DbOperationException} carrying the response's code and cause
     * whenever the acknowledgement is not "1"; a no-op on success.
     *
     * @param jsonObject API response to inspect
     * @throws DbOperationException if the response signals failure
     * @throws InternalDbException if the response is not valid JSON
     */
    private void reportIfError(JSONObject jsonObject) {
        try {
            if (!"1".equals(jsonObject.getString("ack"))) {
                String cause = "";
                String code = "";
                if (jsonObject.has("code")) {
                    code = jsonObject.getString("code");
                }
                if (jsonObject.has("cause")) {
                    cause = jsonObject.getString("cause");
                }
                throw new DbOperationException(code, cause);
            }
        } catch (JSONException ex) {
            throw new InternalDbException("Error in API JSON response", ex);
        }
    }

    /**
     * Reads the value of the field annotated with {@link Primary}.
     *
     * @return the primary key value, or {@code null} if no field carries {@link Primary}
     * @throws IllegalArgumentException if the field cannot be read
     * @throws IllegalAccessException if the field cannot be accessed
     */
    private Object getPrimaryKeyValue() throws IllegalArgumentException, IllegalAccessException {
        Map<String, Field> structureMap = TableStore.getInstance().getStructure(table);
        for (String columnName : structureMap.keySet()) {
            Field field = structureMap.get(columnName);
            if (field.getAnnotation(Primary.class) != null) {
                field.setAccessible(true);
                return field.get(this);
            }
        }
        return null;
    }

    /**
     * Transforms data type of a column dynamically leveraging Java Type Erasure. Currently supports all types that can be used as primary keys in tables.
     *
     * @param <P> Requested data format class parameter
     * @param value value to be transformed
     * @param returnTypeClazz Class object in who's image the {@code value} has to be transformed
     * @return transformed data object to an appropriate type
     */
    private static <P extends Object> P dataTypeTransform(final P value, final Class<P> returnTypeClazz) {
        if (returnTypeClazz == Integer.class) {
            return (P) Integer.valueOf(value.toString());
        }
        if (returnTypeClazz == Float.class) {
            return (P) Float.valueOf(value.toString());
        }
        if (returnTypeClazz == Long.class) {
            return (P) Long.valueOf(value.toString());
        }
        if (returnTypeClazz == Double.class) {
            return (P) Double.valueOf(value.toString());
        }
        // String (or any other type): return unchanged
        return value;
    }

    /**
     * Writes a casted value into the given field of this instance, restoring the
     * field's original accessibility afterwards.
     *
     * @param field field to write
     * @param value raw JSON value to cast and assign
     * @throws IllegalAccessException if the field cannot be written
     */
    private void setFieldValue(final Field field, final Object value) throws IllegalAccessException {
        final boolean oldAccessibilityValue = field.isAccessible();
        field.setAccessible(true);
        field.set(this, getCastedValue(field, value, this.getClass()));
        field.setAccessible(oldAccessibilityValue);
    }

    /**
     * Provides a standard service to cast input types from JSON's format ({@link Integer}, {@link String}, {@link JSONArray} etc.) to Java's internal data
     * types.
     *
     * @param field field in current {@link Object} that needs to be updated
     * @param value value to be set for the field
     * @param parentClazz {@link Class} value of the parent object for which the casted field is being requested. This field is only required for proper error
     * logging in case of exceptions
     * @return appropriately casted value
     */
    private static Object getCastedValue(final Field field, final Object value, final Class<?> parentClazz) {
        final Class<?> type = field.getType();
        if (type == String.class) { // Pre-exit most common use cases
            // JSON null maps to a Java null String
            if (value.getClass() == JSONObject.NULL.getClass()) {
                return null;
            }
            return value;
        }
        if (type.isEnum()) {
            return Enum.valueOf((Class<? extends Enum>) type, value.toString());
        }
        if (type == Double.TYPE || type == Double.class) {
            return new Double(value.toString());
        }
        if (type == Float.TYPE || type == Float.class) {
            return new Float(value.toString());
        }
        if (type == Character.TYPE || type == Character.class) {
            return new Character(value.toString().charAt(0));
        }
        if (type == BigDecimal.class) {
            return new BigDecimal(value.toString());
        }
        // Note: This code is unnecessary but is kept here to show that these values are supported and if tomorrow,
        // the return type of the DB changes to String instead of an int/long in JSON, this code should be uncommented
        // if (type == Integer.TYPE || type == Integer.class) { // should be unnecessary
        //     return new Integer(value.toString());
        // if (type == Long.TYPE || type == Long.class) { // should be unnecessary
        //     return new Long(value.toString());
        if (type == List.class) { // doesn't always return inside this block, BEWARE!
            if (value instanceof JSONArray) {
                final JSONArray arr = (JSONArray) value;
                final int length = arr.length();
                final List<Object> list = new ArrayList(length);
                for (int i = 0; i < length; i++) {
                    list.add(arr.opt(i));
                }
                return list;
            } else if ((value instanceof String && "".equals(value)) || value.getClass() == JSONObject.NULL.getClass()) {
                // empty-string or JSON null column maps to an empty list
                return new ArrayList();
            }
            Logger.getLogger(CloudStorage.class.getName()).log(Level.WARNING,
                    "Class of type \"{0}\" has field with name \"{1}\" and data type \"{2}\" for value to be set was \"{3}\" has a type of {4}. This will probably cause an exception.",
                    new Object[]{parentClazz, field.getName(), type, value, value.getClass()});
        }
        // The if for List check does not always return a value. Be sure before putting any code below here
        // String & any other weird type
        return value;
    }
}
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.*; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.*; public class SVGtoJPGServlet extends HttpServlet { @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { resp.getWriter().print("Hello from Java!\n"); } public static void main(String[] args) throws Exception{ Server server = new Server(Integer.valueOf(System.getenv("PORT"))); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); server.setHandler(context);
package com.conmigo.app; import org.springframework.context.MessageSource; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.EnableAspectJAutoProxy; import org.springframework.context.support.ReloadableResourceBundleMessageSource; import org.springframework.web.servlet.config.annotation.DefaultServletHandlerConfigurer; import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; @Configuration @EnableWebMvc @EnableAspectJAutoProxy public class WebMvcConfig extends WebMvcConfigurerAdapter { @Override public void configureDefaultServletHandling( final DefaultServletHandlerConfigurer configurer ) { configurer.enable(); } @Bean public MessageSource messageSource() { ReloadableResourceBundleMessageSource messageSource = new ReloadableResourceBundleMessageSource(); messageSource.setBasenames( "/resources/i18n/messages", "classpath:/conmigo" ); messageSource.setUseCodeAsDefaultMessage( true ); messageSource.setDefaultEncoding( "UTF-8" ); // messageSource.setParentMessageSource( i18nMessageResourceAware() ); return messageSource; } }
package com.couchbase.lite;

import com.couchbase.lite.internal.InterfaceAudience;
import com.couchbase.lite.util.Log;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;

/**
 * A Query subclass that automatically refreshes the result rows every time the database changes.
 * All you need to do is use add a listener to observe changes.
 *
 * Thread-safety: listeners may be added/removed from any thread; change
 * callbacks are delivered on the async query completion thread.
 */
public final class LiveQuery extends Query implements Database.ChangeListener {

    private boolean observing;
    private boolean willUpdate;
    private QueryEnumerator rows;
    // FIX: listeners are registered/removed from client threads while update()'s
    // completion callback iterates the list on the async query thread; a plain
    // ArrayList risks ConcurrentModificationException. CopyOnWriteArrayList is
    // the idiomatic listener-list container for this access pattern.
    private List<ChangeListener> observers = new CopyOnWriteArrayList<ChangeListener>();
    private Throwable lastError;
    // NOTE(review): updateQueryFuture is referenced throughout but not declared
    // in this file — presumably inherited from Query; confirm.

    /**
     * Constructor: copies every query parameter from the source Query so this
     * live query reproduces the same result set.
     */
    @InterfaceAudience.Private
    /* package */ LiveQuery(Query query) {
        super(query.getDatabase(), query.getView());
        setLimit(query.getLimit());
        setSkip(query.getSkip());
        setStartKey(query.getStartKey());
        setEndKey(query.getEndKey());
        setDescending(query.isDescending());
        setPrefetch(query.shouldPrefetch());
        setKeys(query.getKeys());
        setGroupLevel(query.getGroupLevel());
        setMapOnly(query.isMapOnly());
        setStartKeyDocId(query.getStartKeyDocId());
        setEndKeyDocId(query.getEndKeyDocId());
        setIndexUpdateMode(query.getIndexUpdateMode());
    }

    /**
     * Sends the query to the server and returns an enumerator over the result rows (Synchronous).
     * Note: In a CBLLiveQuery you should add a ChangeListener and call start() instead.
     *
     * @return a fresh copy of the current rows, or null if none are available yet
     * @throws CouchbaseLiteException wrapping whatever the underlying query threw
     */
    @Override
    @InterfaceAudience.Public
    public QueryEnumerator run() throws CouchbaseLiteException {
        try {
            waitForRows();
        } catch (Exception e) {
            // FIX: waitForRows() can throw InterruptedException; when we swallow
            // it into a CouchbaseLiteException we must restore the thread's
            // interrupt status so callers can still observe the interruption.
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            lastError = e;
            throw new CouchbaseLiteException(e, Status.INTERNAL_SERVER_ERROR);
        }
        if (rows == null) {
            return null;
        } else {
            // Have to return a copy because the enumeration has to start at item #0 every time
            return new QueryEnumerator(rows);
        }
    }

    /**
     * Returns the last error, if any, that occured while executing the Query, otherwise null.
     */
    @InterfaceAudience.Public
    public Throwable getLastError() {
        return lastError;
    }

    /**
     * Starts observing database changes. The .rows property will now update
     * automatically. (You usually don't need to call this yourself, since
     * calling getRows() will start it for you.)
     */
    @InterfaceAudience.Public
    public void start() {
        if (!observing) {
            observing = true;
            getDatabase().addChangeListener(this);
            update();
        }
    }

    /**
     * Stops observing database changes. Calling start() or rows() will restart it.
     */
    @InterfaceAudience.Public
    public void stop() {
        if (observing) {
            observing = false;
            getDatabase().removeChangeListener(this);
        }
        if (willUpdate) {
            setWillUpdate(false);
        }
        // slight diversion from iOS version -- cancel the updateQueryFuture
        // regardless of the willUpdate value, since there can be an update in flight
        // with willUpdate set to false. was needed to make testLiveQueryStop() unit test pass.
        if (updateQueryFuture != null) {
            boolean cancelled = updateQueryFuture.cancel(true);
            Log.d(Database.TAG, this + ": cancelled updateQueryFuture " + updateQueryFuture + ", returned: " + cancelled);
        }
    }

    /**
     * Blocks until the intial async query finishes. After this call either .rows or .error will be non-nil.
     *
     * @throws InterruptedException if the waiting thread is interrupted
     * @throws ExecutionException  if the async query itself failed
     */
    @InterfaceAudience.Public
    public void waitForRows() throws InterruptedException, ExecutionException {
        start();
        try {
            updateQueryFuture.get();
        } catch (InterruptedException e) {
            Log.e(Database.TAG, "Got interrupted exception waiting for rows", e);
            throw e;
        } catch (ExecutionException e) {
            Log.e(Database.TAG, "Got execution exception waiting for rows", e);
            throw e;
        }
    }

    /**
     * Gets the results of the Query. The value will be null until the initial Query completes.
     * Starts observation as a side effect.
     */
    @InterfaceAudience.Public
    public QueryEnumerator getRows() {
        start();
        if (rows == null) {
            return null;
        } else {
            // Have to return a copy because the enumeration has to start at item #0 every time
            return new QueryEnumerator(rows);
        }
    }

    /**
     * Add a change listener to be notified when the live query result
     * set changes.
     */
    @InterfaceAudience.Public
    public void addChangeListener(ChangeListener changeListener) {
        observers.add(changeListener);
    }

    /**
     * Remove previously added change listener
     */
    @InterfaceAudience.Public
    public void removeChangeListener(ChangeListener changeListener) {
        observers.remove(changeListener);
    }

    /**
     * The type of event raised when a LiveQuery result set changes.
     * Exactly one of {queryEnumerator, error} is set per event.
     */
    @InterfaceAudience.Public
    public static class ChangeEvent {

        private LiveQuery source;
        private Throwable error;
        private QueryEnumerator queryEnumerator;

        ChangeEvent() {
        }

        ChangeEvent(LiveQuery source, QueryEnumerator queryEnumerator) {
            this.source = source;
            this.queryEnumerator = queryEnumerator;
        }

        ChangeEvent(Throwable error) {
            this.error = error;
        }

        public LiveQuery getSource() {
            return source;
        }

        public Throwable getError() {
            return error;
        }

        public QueryEnumerator getRows() {
            return queryEnumerator;
        }
    }

    /**
     * A delegate that can be used to listen for LiveQuery result set changes.
     */
    @InterfaceAudience.Public
    public static interface ChangeListener {
        public void changed(ChangeEvent event);
    }

    /**
     * Launches an async re-run of the query; the completion callback notifies
     * observers only when the result set actually changed.
     */
    @InterfaceAudience.Private
    /* package */ void update() {
        Log.d(Database.TAG, "update() called");
        if (getView() == null) {
            throw new IllegalStateException("Cannot start LiveQuery when view is null");
        }
        setWillUpdate(false);
        updateQueryFuture = runAsyncInternal(new QueryCompleteListener() {
            @Override
            public void completed(QueryEnumerator rowsParam, Throwable error) {
                if (error != null) {
                    for (ChangeListener observer : observers) {
                        observer.changed(new ChangeEvent(error));
                    }
                    lastError = error;
                } else {
                    if (rowsParam != null && !rowsParam.equals(rows)) {
                        setRows(rowsParam);
                        for (ChangeListener observer : observers) {
                            Log.d(Database.TAG, "update() calling back observer with rows");
                            // FIX: report rowsParam (the result this callback just
                            // delivered) instead of re-reading the rows field, which
                            // a concurrent update could have replaced in the interim.
                            observer.changed(new ChangeEvent(LiveQuery.this, rowsParam));
                        }
                    }
                    lastError = null;
                }
            }
        });
    }

    /**
     * @exclude
     */
    @Override
    @InterfaceAudience.Private
    public void changed(Database.ChangeEvent event) {
        if (!willUpdate) {
            setWillUpdate(true);
            updateQueryFuture = getDatabase().runAsync(new AsyncTask() {
                @Override
                public void run(Database database) {
                    update();
                }
            });
            Log.d(Database.TAG, "changed() called, created updateQueryFuture: " + updateQueryFuture);
        }
    }

    @InterfaceAudience.Private
    private synchronized void setRows(QueryEnumerator queryEnumerator) {
        rows = queryEnumerator;
    }

    @InterfaceAudience.Private
    private synchronized void setWillUpdate(boolean willUpdateParam) {
        willUpdate = willUpdateParam;
    }
}
package com.couchbase.lite;

import com.couchbase.lite.internal.InterfaceAudience;
import com.couchbase.lite.util.Log;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;

/**
 * A Query subclass that automatically refreshes the result rows every time the database changes.
 * All you need to do is use add a listener to observe changes.
 *
 * Thread-safety: listeners may be added/removed from any thread; change
 * callbacks are delivered on the async query completion thread.
 */
public class LiveQuery extends Query implements Database.ChangeListener {

    private boolean observing;
    private boolean willUpdate;
    private QueryEnumerator rows;
    // FIX: listeners are registered/removed from client threads while the
    // async completion callback iterates the list on the query thread; a plain
    // ArrayList risks ConcurrentModificationException. CopyOnWriteArrayList is
    // the idiomatic listener-list container for this access pattern.
    private List<ChangeListener> observers = new CopyOnWriteArrayList<ChangeListener>();
    private Throwable lastError;
    // NOTE(review): updateQueryFuture is referenced throughout but not declared
    // in this file — presumably inherited from Query; confirm.

    /**
     * Constructor: copies every query parameter from the source Query so this
     * live query reproduces the same result set.
     */
    @InterfaceAudience.Private
    LiveQuery(Query query) {
        super(query.getDatabase(), query.getView());
        setLimit(query.getLimit());
        setSkip(query.getSkip());
        setStartKey(query.getStartKey());
        setEndKey(query.getEndKey());
        setDescending(query.isDescending());
        setPrefetch(query.shouldPrefetch());
        setKeys(query.getKeys());
        setGroupLevel(query.getGroupLevel());
        setMapOnly(query.isMapOnly());
        setStartKeyDocId(query.getStartKeyDocId());
        setEndKeyDocId(query.getEndKeyDocId());
        setIndexUpdateMode(query.getIndexUpdateMode());
    }

    /**
     * In LiveQuery the rows accessor is a non-blocking property.
     * Its value will be nil until the initial query finishes.
     *
     * @return a fresh copy of the current rows, or null if none are available yet
     */
    @InterfaceAudience.Public
    public QueryEnumerator run() throws CouchbaseLiteException {
        if (rows == null) {
            return null;
        } else {
            // Have to return a copy because the enumeration has to start at item #0 every time
            return new QueryEnumerator(rows);
        }
    }

    /**
     * Returns the last error, if any, that occured while executing the Query, otherwise null.
     */
    @InterfaceAudience.Public
    public Throwable getLastError() {
        return lastError;
    }

    /**
     * Starts observing database changes. The .rows property will now update automatically. (You
     * usually don't need to call this yourself, since calling rows()
     * call start for you.)
     * Note: every call triggers a fresh async query, even when already observing.
     */
    @InterfaceAudience.Public
    public void start() {
        if (!observing) {
            observing = true;
            getDatabase().addChangeListener(this);
        }
        update();
    }

    /**
     * Stops observing database changes. Calling start() or rows() will restart it.
     */
    @InterfaceAudience.Public
    public void stop() {
        if (observing) {
            observing = false;
            getDatabase().removeChangeListener(this);
        }
        setWillUpdate(false);
        // FIX: cancel any in-flight query unconditionally. The original only
        // cancelled when willUpdate was true, which (a) leaked an update already
        // in flight with willUpdate == false, and (b) could NPE if willUpdate
        // was set before a future had ever been assigned.
        if (updateQueryFuture != null) {
            updateQueryFuture.cancel(true);
        }
    }

    /**
     * Blocks until the intial async query finishes. After this call either .rows or .error will be non-nil.
     *
     * @throws InterruptedException if the waiting thread is interrupted
     * @throws ExecutionException  if the async query itself failed
     */
    @InterfaceAudience.Public
    public void waitForRows() throws InterruptedException, ExecutionException {
        start();
        try {
            updateQueryFuture.get();
        } catch (InterruptedException e) {
            Log.e(Database.TAG, "Got interrupted exception waiting for rows", e);
            throw e;
        } catch (ExecutionException e) {
            Log.e(Database.TAG, "Got execution exception waiting for rows", e);
            throw e;
        }
    }

    /**
     * Add a change listener to be notified when the live query result
     * set changes.
     */
    @InterfaceAudience.Public
    public void addChangeListener(ChangeListener changeListener) {
        observers.add(changeListener);
    }

    /**
     * Remove previously added change listener
     */
    @InterfaceAudience.Public
    public void removeChangeListener(ChangeListener changeListener) {
        observers.remove(changeListener);
    }

    /**
     * Launches an async re-run of the query; the completion callback notifies
     * observers only when the result set actually changed.
     */
    void update() {
        if (getView() == null) {
            throw new IllegalStateException("Cannot start LiveQuery when view is null");
        }
        setWillUpdate(false);
        updateQueryFuture = runAsyncInternal(new QueryCompleteListener() {
            @Override
            public void completed(QueryEnumerator rowsParam, Throwable error) {
                if (error != null) {
                    for (ChangeListener observer : observers) {
                        observer.changed(new ChangeEvent(error));
                    }
                    lastError = error;
                } else {
                    if (rowsParam != null && !rowsParam.equals(rows)) {
                        setRows(rowsParam);
                        for (ChangeListener observer : observers) {
                            observer.changed(new ChangeEvent(LiveQuery.this, rows));
                        }
                    }
                    lastError = null;
                }
            }
        });
    }

    @Override
    public void changed(Database.ChangeEvent event) {
        // Coalesce bursts of database changes into a single pending update.
        if (!willUpdate) {
            setWillUpdate(true);
            update();
        }
    }

    private synchronized void setRows(QueryEnumerator queryEnumerator) {
        rows = queryEnumerator;
    }

    private synchronized void setWillUpdate(boolean willUpdateParam) {
        willUpdate = willUpdateParam;
    }

    /**
     * The type of event raised when a LiveQuery result set changes.
     * Exactly one of {queryEnumerator, error} is set per event.
     */
    public static class ChangeEvent {

        private LiveQuery source;
        private Throwable error;
        private QueryEnumerator queryEnumerator;

        ChangeEvent() {
        }

        ChangeEvent(LiveQuery source, QueryEnumerator queryEnumerator) {
            this.source = source;
            this.queryEnumerator = queryEnumerator;
        }

        ChangeEvent(Throwable error) {
            this.error = error;
        }

        public LiveQuery getSource() {
            return source;
        }

        public Throwable getError() {
            return error;
        }

        public QueryEnumerator getRows() {
            return queryEnumerator;
        }
    }

    /**
     * A delegate that can be used to listen for LiveQuery result set changes.
     */
    public static interface ChangeListener {
        public void changed(ChangeEvent event);
    }
}