answer
stringlengths
17
10.2M
package com.groupon.lex.metrics.history.v2.list; import com.groupon.lex.metrics.history.TSDataVersionDispatch; import com.groupon.lex.metrics.history.xdr.support.FileSupport; import com.groupon.lex.metrics.lib.GCCloseable; import com.groupon.lex.metrics.timeseries.TimeSeriesCollection; import java.io.IOException; import java.nio.channels.FileChannel; import java.util.Collection; /** * * @author ariane */ public class FileListFileSupport implements FileSupport.Writer { @Override public void create_file(TSDataVersionDispatch.Releaseable<FileChannel> fd, Collection<? extends TimeSeriesCollection> tsdata, boolean compress) throws IOException { RWListFile listFile = RWListFile.newFile(new GCCloseable<>(fd.release()), true); tsdata.forEach(listFile::add); } @Override public short getMajor() { return (short)2; } @Override public short getMinor() { return (short)0; } }
package com.worth.ifs.security; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.web.access.AccessDeniedHandler; import org.springframework.security.web.access.AccessDeniedHandlerImpl; import org.springframework.security.web.csrf.CsrfException; import org.springframework.security.web.csrf.CsrfToken; import org.springframework.security.web.util.matcher.RequestMatcher; import org.springframework.stereotype.Service; import org.springframework.web.filter.OncePerRequestFilter; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.regex.Pattern; /** * <p> * Variation of the default Spring Security {@link org.springframework.security.web.csrf.CsrfFilter}. Caters for being a stateless application by employing the Encrypted Token Pattern: * <ol> * <li>Client browser sends a request.</li> * <li>Filter generates a new encrypted token using the {@link CsrfTokenService} consisting of a random one-off string (nonce), the UID of the currently authenticated user, and a timestamp.</li> * <li>Token is added into the response as a hidden field of any Thymeleaf forms which can then be passed back on submission.</li> * <li>Token is also added as a cookie to be read by any javascript wanting to make a protected request (e.g. ajax) rather than relying on a form with the hidden field existing in the DOM which won't always be the case.</li> * <li>Client browser sends a protected request invoking a state change (e.g. a request using the POST method)</li> * <li>The protected request includes either the CSRF token in the `X-CSRF-TOKEN header (e.g. for an ajax request), or as the `_csrf` parameter (e.g. 
when a Thymeleaf form is submitted).</li> * <li>Repeat the initial steps to generate a new token and add it to the response for use in subsequent requests.</li> * <li>{@link CsrfTokenService} decrypts and validates that the token has not expired and was generated for the currently authenticated user.</li> * <li>On failure handle the request with a 403 response, otherwise forward the request to the next filter in the chain.</li> * </ol> * </p> * <p> * Developers are required to ensure that {@link CsrfStatelessFilter} is invoked for any request that allows state to change. * Typically this just means that they should ensure their web application follows proper REST semantics (i.e. do not change state with the HTTP methods GET, HEAD, TRACE, OPTIONS). * </p> * <p> * When used in combination with the {@link org.springframework.security.config.annotation.web.configuration.EnableWebSecurity} annotation on a {@link org.springframework.context.annotation.Configuration} class, * any Thymeleaf form will automatically have the CsrfToken appended by the {@link org.springframework.security.web.servlet.support.csrf.CsrfRequestDataValueProcessor}. * </p> * <p> * The Thymeleaf CSRF processor relies on the form having a th:action attribute and not the HTML action, so the attribute {@code action="#"} is not sufficient to invoke this. * {@code th:action} calls RequestDataValueProcessor.getExtraHiddenFields(...) and adds the returned CSRF token as a hidden field just before the closing </form> tag. 
* </p> */ @Service final class CsrfStatelessFilter extends OncePerRequestFilter { private static final String CSRF_COOKIE_NAME = "CSRF-TOKEN"; private static final Log LOG = LogFactory.getLog(CsrfStatelessFilter.class); @Autowired private CsrfTokenService tokenService; private AccessDeniedHandler accessDeniedHandler = new AccessDeniedHandlerImpl(); private final RequestMatcher requireCsrfProtectionMatcher = new DefaultRequiresCsrfMatcher(); @Override protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException { final CsrfToken token = tokenService.generateToken(); // Set the token to be used in subsequent requests if (!isResourceRequest(request)) { // Add the CsrfToken as an attribute of the request as expected by org.springframework.security.web.servlet.support.csrf.CsrfRequestDataValueProcessor#getExtraHiddenFields(javax.servlet.http.HttpServletRequest) request.setAttribute(CsrfToken.class.getName(), token); // Not all pages have a Thymeleaf form on them. // To cater for javascript clients making ajax calls that need to supply the CSRF token as a header parameter, set the token in the response as a cookie. // The cookie can then be the preferred location for finding the current CSRF token in javascript, over relying on a searching for a Thymeleaf form in the DOM. 
setTokenAsCookie(response, token); } // Check if CSRF protection should be applied to this request if (!requireCsrfProtectionMatcher.matches(request)) { filterChain.doFilter(request, response); return; } // Validate the CSRF token try { tokenService.validateToken(request); } catch (final CsrfException e) { LOG.warn("Handling access denied for exception", e); accessDeniedHandler.handle(request, response, e); return; } filterChain.doFilter(request, response); } private boolean isResourceRequest(final HttpServletRequest request) { final String uri = request.getRequestURI(); return uri.contains("/js/") || uri.contains("/css/") || uri.contains("/images/") || uri.contains("/favicon.ico") || uri.contains("/prototypes") || uri.contains("/error"); } private void setTokenAsCookie(final HttpServletResponse response, final CsrfToken token) { response.addCookie(createCookie(token)); } private Cookie createCookie(final CsrfToken token) { final Cookie cookie = new Cookie(CSRF_COOKIE_NAME, token.getToken()); cookie.setPath("/"); cookie.setMaxAge(30 * 60); cookie.setSecure(true); return cookie; } protected void setAccessDeniedHandler(AccessDeniedHandler accessDeniedHandler) { this.accessDeniedHandler = accessDeniedHandler; } private static final class DefaultRequiresCsrfMatcher implements RequestMatcher { private final Pattern allowedMethods = Pattern.compile("^(GET|HEAD|TRACE|OPTIONS)$"); @Override public boolean matches(HttpServletRequest request) { return !allowedMethods.matcher(request.getMethod()).matches(); } } }
package edu.isi.dig.webservices; import java.util.Map; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.sort.SortOrder; @Path("/isi") public class DereferenceURIServlet { public static String PARAM_NOT_FOUND = "Not_Found"; final String INDEX_IMAGES = "images"; final String INDEX_TYPE_IMAGE = "image"; final String INDEX_PAGES = "pages"; final String INDEX_TYPE_PAGE = "page"; final String SEARCH_RESULTS="results"; final String CLUSTER_NAME = "cluster.name"; final String CLUSTER_NAME_VALUE = "dig_isi"; final String ELASTICSEARCH_HOST = "karma-dig-service.cloudapp.net"; final int ELASTICSEARCH_PORT = 55309; //or 55315 for dig 5 Client esClient=null; TransportClient ts =null; MultiSearchResponse multiResp = null; Settings settings = null; @GET @Path("/images/{sha}/{epoch}/processed") public String GetElasticSearchImages(@PathParam("sha") String sha, @PathParam("epoch") String epoch){ return GetImagesURLs(sha, epoch, INDEX_IMAGES, INDEX_TYPE_IMAGE); } @GET @Path("/images/{sha}/latest/processed") public String GetElasticSearchImagesBySha(@PathParam("sha") String sha){ return GetImageURLsBySha(sha, INDEX_IMAGES, INDEX_TYPE_IMAGE); } @GET @Path("/images/{sha}/processed") public String GetElasticSearchImageAllEpochs(@PathParam("sha") String sha){ return 
GetImagesAllEpochs(sha, INDEX_IMAGES, INDEX_TYPE_IMAGE); } @GET @Path("/pages/{sha}/{epoch}/processed") public String GetElasticSearchPages(@PathParam("sha") String sha, @PathParam("epoch") String epoch){ return GetPagesURLs(sha, epoch, INDEX_PAGES, INDEX_TYPE_PAGE); } @GET @Path("/pages/{sha}/latest/processed") public String GetElasticSearchPagesBySha(@PathParam("sha") String sha){ return GetPageURLsBySha(sha, INDEX_PAGES, INDEX_TYPE_PAGE); } @GET @Path("/pages/{sha}/processed") public String GetElasticSearchAllEpochs(@PathParam("sha") String sha){ return GetPagesAllEpochs(sha, INDEX_PAGES, INDEX_TYPE_PAGE); } public String GetImagesURLs(String sha, String epoch,String indexName, String indexType){ try{ settings = ImmutableSettings.settingsBuilder() .put(CLUSTER_NAME, CLUSTER_NAME_VALUE).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT)); SearchRequestBuilder srbSha = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.SHA1, sha)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) .addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.CONTENT_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.CONTENT_SHA1) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE); SearchRequestBuilder srbEpoch = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.EPOCH, epoch)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) .addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.CONTENT_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.CONTENT_SHA1) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE); multiResp = esClient.prepareMultiSearch() .add(srbSha) .add(srbEpoch) .execute() .actionGet(); MultiSearchResponse.Item item = 
multiResp.getResponses()[0]; SearchHit[] searchHit; Map<String,SearchHitField> map ; SearchResponse searchResp = item.getResponse(); searchHit = searchResp.getHits().getHits(); JSONObject parentObj= new JSONObject(); JSONArray jArray = new JSONArray(); if(searchHit.length > 0) { for(SearchHit sr : searchHit){ map = sr.getFields(); JSONObject obj = new JSONObject(); obj.accumulate(map.get(SearchFieldsES.NATIVE_URL).getName(), map.get(SearchFieldsES.NATIVE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CONTENT_URL).getName(), map.get(SearchFieldsES.CONTENT_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CACHE_URL).getName(), map.get(SearchFieldsES.CACHE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.MEMEX_URL).getName(), map.get(SearchFieldsES.MEMEX_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.SHA1).getName(), map.get(SearchFieldsES.SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.SOURCE).getName(), map.get(SearchFieldsES.SOURCE).getValue()); obj.accumulate(map.get(SearchFieldsES.CONTENT_SHA1).getName(), map.get(SearchFieldsES.CONTENT_SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.EPOCH).getName(), map.get(SearchFieldsES.EPOCH).getValue()); jArray.add(obj); } } parentObj.accumulate(SEARCH_RESULTS, jArray); return parentObj.toString(); }catch(Exception e){ return e.toString(); }finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public String GetImageURLsBySha(String sha, String indexName, String indexType){ try{ if(sha.trim() != "") { settings = ImmutableSettings.settingsBuilder() .put(CLUSTER_NAME, CLUSTER_NAME_VALUE).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT)); SearchRequestBuilder srbSha = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.SHA1, sha)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) 
.addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.CONTENT_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.CONTENT_SHA1) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE) .addSort(SearchFieldsES.EPOCH, SortOrder.DESC); multiResp = esClient.prepareMultiSearch() .add(srbSha) .execute() .actionGet(); ts.close(); } MultiSearchResponse.Item item = multiResp.getResponses()[0]; //Can't figure out why would it have more Items than one SearchResponse searchResp = item.getResponse(); SearchHit[] searchHit = searchResp.getHits().getHits(); JSONObject parentObj= new JSONObject(); JSONArray jArray = new JSONArray(); JSONObject obj = new JSONObject(); if(searchHit.length > 0){ SearchHit searchHitLatest = searchHit[0]; //we need the latest, get the first one, sorted by epoch in descending order Map<String,SearchHitField> map = searchHitLatest.getFields(); obj.accumulate(map.get(SearchFieldsES.NATIVE_URL).getName(), map.get(SearchFieldsES.NATIVE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CONTENT_URL).getName(), map.get(SearchFieldsES.CONTENT_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CACHE_URL).getName(), map.get(SearchFieldsES.CACHE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.MEMEX_URL).getName(), map.get(SearchFieldsES.MEMEX_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.SHA1).getName(), map.get(SearchFieldsES.SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.SOURCE).getName(), map.get(SearchFieldsES.SOURCE).getValue()); obj.accumulate(map.get(SearchFieldsES.CONTENT_SHA1).getName(), map.get(SearchFieldsES.CONTENT_SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.EPOCH).getName(), map.get(SearchFieldsES.EPOCH).getValue()); } jArray.add(obj); parentObj.accumulate(SEARCH_RESULTS, jArray); return parentObj.toString(); } catch(Exception e){ return e.toString(); } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public String 
GetImagesAllEpochs(String sha, String indexName, String indexType){ try{ settings = ImmutableSettings.settingsBuilder() .put(CLUSTER_NAME, CLUSTER_NAME_VALUE).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT)); SearchRequestBuilder srbSha = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.SHA1, sha)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) .addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.CONTENT_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.CONTENT_SHA1) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE) .addSort(SearchFieldsES.EPOCH, SortOrder.DESC); multiResp = esClient.prepareMultiSearch() .add(srbSha) .execute() .actionGet(); MultiSearchResponse.Item item = multiResp.getResponses()[0]; //should be the first one, no point of getting more than responses. 
// will check the ElasticCode later to understand it better SearchHit[] searchHit; Map<String,SearchHitField> map ; SearchResponse searchResp = item.getResponse(); searchHit = searchResp.getHits().getHits(); JSONObject parentObj= new JSONObject(); JSONArray jArray = new JSONArray(); if(searchHit.length > 0) { for(SearchHit sr : searchHit){ map = sr.getFields(); JSONObject obj = new JSONObject(); obj.accumulate(map.get(SearchFieldsES.NATIVE_URL).getName(), map.get(SearchFieldsES.NATIVE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CONTENT_URL).getName(), map.get(SearchFieldsES.CONTENT_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CACHE_URL).getName(), map.get(SearchFieldsES.CACHE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.MEMEX_URL).getName(), map.get(SearchFieldsES.MEMEX_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.SHA1).getName(), map.get(SearchFieldsES.SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.SOURCE).getName(), map.get(SearchFieldsES.SOURCE).getValue()); obj.accumulate(map.get(SearchFieldsES.CONTENT_SHA1).getName(), map.get(SearchFieldsES.CONTENT_SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.EPOCH).getName(), map.get(SearchFieldsES.EPOCH).getValue()); jArray.add(obj); } } parentObj.accumulate(SEARCH_RESULTS, jArray); return parentObj.toString(); }catch(Exception e){ return e.toString(); }finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public String GetPagesURLs(String sha, String epoch,String indexName, String indexType){ try{ settings = ImmutableSettings.settingsBuilder() .put(CLUSTER_NAME, CLUSTER_NAME_VALUE).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT)); SearchRequestBuilder srbSha = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.SHA1, sha)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) 
.addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE); SearchRequestBuilder srbEpoch = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.EPOCH, epoch)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) .addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE); multiResp = esClient.prepareMultiSearch() .add(srbSha) .add(srbEpoch) .execute() .actionGet(); MultiSearchResponse.Item item = multiResp.getResponses()[0]; SearchHit[] searchHit; Map<String,SearchHitField> map ; SearchResponse searchResp = item.getResponse(); searchHit = searchResp.getHits().getHits(); JSONObject parentObj= new JSONObject(); JSONArray jArray = new JSONArray(); if(searchHit.length > 0) { for(SearchHit sr : searchHit){ map = sr.getFields(); JSONObject obj = new JSONObject(); obj.accumulate(map.get(SearchFieldsES.NATIVE_URL).getName(), map.get(SearchFieldsES.NATIVE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CACHE_URL).getName(), map.get(SearchFieldsES.CACHE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.MEMEX_URL).getName(), map.get(SearchFieldsES.MEMEX_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.SHA1).getName(), map.get(SearchFieldsES.SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.SOURCE).getName(), map.get(SearchFieldsES.SOURCE).getValue()); obj.accumulate(map.get(SearchFieldsES.EPOCH).getName(), map.get(SearchFieldsES.EPOCH).getValue()); jArray.add(obj); } } parentObj.accumulate(SEARCH_RESULTS, jArray); return parentObj.toString(); }catch(Exception e){ return e.toString(); }finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public String GetPageURLsBySha(String sha, String indexName, String indexType){ try{ if(sha.trim() != "") { settings = 
ImmutableSettings.settingsBuilder() .put(CLUSTER_NAME, CLUSTER_NAME_VALUE).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT)); SearchRequestBuilder srbSha = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.SHA1, sha)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) .addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE) .addSort(SearchFieldsES.EPOCH, SortOrder.DESC); multiResp = esClient.prepareMultiSearch() .add(srbSha) .execute() .actionGet(); ts.close(); } MultiSearchResponse.Item item = multiResp.getResponses()[0]; //Can't figure out why would it have more Items than one SearchResponse searchResp = item.getResponse(); SearchHit[] searchHit = searchResp.getHits().getHits(); JSONObject parentObj= new JSONObject(); JSONArray jArray = new JSONArray(); JSONObject obj = new JSONObject(); if(searchHit.length > 0){ SearchHit searchHitLatest = searchHit[0]; //we need the latest, get the first one, sorted by epoch in descending order Map<String,SearchHitField> map = searchHitLatest.getFields(); obj.accumulate(map.get(SearchFieldsES.NATIVE_URL).getName(), map.get(SearchFieldsES.NATIVE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CACHE_URL).getName(), map.get(SearchFieldsES.CACHE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.MEMEX_URL).getName(), map.get(SearchFieldsES.MEMEX_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.SHA1).getName(), map.get(SearchFieldsES.SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.SOURCE).getName(), map.get(SearchFieldsES.SOURCE).getValue()); obj.accumulate(map.get(SearchFieldsES.EPOCH).getName(), map.get(SearchFieldsES.EPOCH).getValue()); } jArray.add(obj); parentObj.accumulate(SEARCH_RESULTS, jArray); return parentObj.toString(); } 
catch(Exception e){ return e.toString(); } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public String GetPagesAllEpochs(String sha, String indexName, String indexType){ try{ settings = ImmutableSettings.settingsBuilder() .put(CLUSTER_NAME, CLUSTER_NAME_VALUE).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT)); SearchRequestBuilder srbSha = esClient.prepareSearch() .setQuery(QueryBuilders.matchQuery(SearchFieldsES.SHA1, sha)) .setIndices(indexName) .setTypes(indexType) .addField(SearchFieldsES.NATIVE_URL) .addField(SearchFieldsES.CACHE_URL) .addField(SearchFieldsES.MEMEX_URL) .addField(SearchFieldsES.EPOCH) .addField(SearchFieldsES.SHA1) .addField(SearchFieldsES.SOURCE) .addSort(SearchFieldsES.EPOCH, SortOrder.DESC); multiResp = esClient.prepareMultiSearch() .add(srbSha) .execute() .actionGet(); MultiSearchResponse.Item item = multiResp.getResponses()[0]; //should be the first one, no point of getting more than responses. 
// will check the ElasticCode later to understand it better SearchHit[] searchHit; Map<String,SearchHitField> map ; SearchResponse searchResp = item.getResponse(); searchHit = searchResp.getHits().getHits(); JSONObject parentObj= new JSONObject(); JSONArray jArray = new JSONArray(); if(searchHit.length > 0) { for(SearchHit sr : searchHit){ map = sr.getFields(); JSONObject obj = new JSONObject(); obj.accumulate(map.get(SearchFieldsES.NATIVE_URL).getName(), map.get(SearchFieldsES.NATIVE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.CACHE_URL).getName(), map.get(SearchFieldsES.CACHE_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.MEMEX_URL).getName(), map.get(SearchFieldsES.MEMEX_URL).getValue()); obj.accumulate(map.get(SearchFieldsES.SHA1).getName(), map.get(SearchFieldsES.SHA1).getValue()); obj.accumulate(map.get(SearchFieldsES.SOURCE).getName(), map.get(SearchFieldsES.SOURCE).getValue()); obj.accumulate(map.get(SearchFieldsES.EPOCH).getName(), map.get(SearchFieldsES.EPOCH).getValue()); jArray.add(obj); } } parentObj.accumulate(SEARCH_RESULTS, jArray); return parentObj.toString(); }catch(Exception e){ return e.toString(); }finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } }
package org.jboss.remoting3.spi;

/**
 * A descriptor for automatically-discovered connection provider types. Since instances of this interface are
 * constructed automatically, implementing classes should have a no-arg constructor.
 * <p>
 * To add an automatically-discovered provider, create a file called
 * {@code "META-INF/services/org.jboss.remoting3.spi.ConnectionProviderDescriptor"}
 * and populate it with the names of classes that implement this interface.
 *
 * @see java.util.ServiceLoader
 */
public interface ConnectionProviderDescriptor {

    /**
     * Get the URI scheme for this provider. A provider factory may be registered more than one time with different
     * URI schemes.
     *
     * @return the URI scheme
     */
    String getUriScheme();

    /**
     * Get the connection provider factory to associate with the given URI scheme
     * (the one reported by {@link #getUriScheme()}).
     *
     * @return the connection provider factory
     */
    ConnectionProviderFactory<?> getConnectionProviderFactory();
}
package org.ihtsdo.otf.mapping.jpa.services; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityTransaction; import javax.persistence.Persistence; import org.apache.log4j.Logger; import org.ihtsdo.otf.mapping.helpers.PfsParameter; import org.ihtsdo.otf.mapping.helpers.SearchResult; import org.ihtsdo.otf.mapping.helpers.SearchResultJpa; import org.ihtsdo.otf.mapping.helpers.SearchResultList; import org.ihtsdo.otf.mapping.helpers.SearchResultListJpa; import org.ihtsdo.otf.mapping.helpers.ValidationResult; import org.ihtsdo.otf.mapping.helpers.WorkflowStatus; import org.ihtsdo.otf.mapping.jpa.MapRecordJpa; import org.ihtsdo.otf.mapping.jpa.handlers.DefaultProjectSpecificAlgorithmHandler; import org.ihtsdo.otf.mapping.model.MapProject; import org.ihtsdo.otf.mapping.model.MapRecord; import org.ihtsdo.otf.mapping.model.MapUser; import org.ihtsdo.otf.mapping.rf2.Concept; import org.ihtsdo.otf.mapping.services.ContentService; import org.ihtsdo.otf.mapping.services.MappingService; import org.ihtsdo.otf.mapping.services.WorkflowService; import org.ihtsdo.otf.mapping.workflow.WorkflowTrackingRecord; import org.ihtsdo.otf.mapping.workflow.WorkflowTrackingRecordJpa; /** * Default workflow service implementation. */ public class WorkflowServiceJpa implements WorkflowService { /** The factory. */ private static EntityManagerFactory factory; /** The manager. */ private EntityManager manager; /** The transaction per operation. */ private boolean transactionPerOperation = true; /** The transaction entity. */ private EntityTransaction tx; /** * Instantiates an empty {@link WorkflowServiceJpa}. 
*/ public WorkflowServiceJpa() { if (factory == null || !factory.isOpen()) { factory = Persistence.createEntityManagerFactory("MappingServiceDS"); } manager = factory.createEntityManager(); } @SuppressWarnings("unchecked") @Override public List<WorkflowTrackingRecord> getWorkflowTrackingRecords(MapProject mapProject) { return manager.createQuery("select tr from WorkflowTrackingRecordJpa tr where mapProject_id = :mapProjectId") .setParameter("mapProjectId", mapProject.getId()) .getResultList(); } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService# Workflow(org.ihtsdo.otf.mapping.model.MapProject) */ @Override public void computeWorkflow(MapProject project) throws Exception { /** Remove any existing workflow object for this map project */ for (WorkflowTrackingRecord tr : getWorkflowTrackingRecords(project)) { removeWorkflowTrackingRecord(tr.getId()); } /** Create a new Workflow object for this map project and set map project and persist it*/ if (getTransactionPerOperation()) { EntityTransaction tx = manager.getTransaction(); tx.begin(); } /** find all unmapped, in scope concepts for the specified project*/ ContentService contentService = new ContentServiceJpa(); MappingService mappingService = new MappingServiceJpa(); SearchResultList searchResultList = mappingService.findUnmappedConceptsInScope(project.getId()); for (SearchResult sr : searchResultList.getSearchResults()) { Concept concept = contentService.getConcept(sr.getTerminologyId(), sr.getTerminology(), sr.getTerminologyVersion()); /* Create a workflow tracking record and persist it */ WorkflowTrackingRecord trackingRecord = new WorkflowTrackingRecordJpa(); trackingRecord.setMapProject(project); trackingRecord.setTerminology(concept.getTerminology()); trackingRecord.setTerminologyId(concept.getTerminologyId()); trackingRecord.setTerminologyVersion(concept.getTerminologyVersion()); trackingRecord.setDefaultPreferredName(concept.getDefaultPreferredName()); /* set sortKey to the first tree 
position for the concept */ SearchResultList treePositionsList = contentService.findTreePositionsForConcept(concept.getTerminologyId(), concept.getTerminology(), concept.getTerminologyVersion()); trackingRecord.setSortKey(treePositionsList.getSearchResults().get(0).getValue()); manager.persist(trackingRecord); /* get MapRecords for this concept in this project */ List<MapRecord> mapRecords = mappingService.getMapRecordsForConcept(concept.getTerminologyId()); boolean conflictDetected = true; boolean earlyStage = false; Set<MapUser> assignedUsers = new HashSet<>(); if (mapRecords == null || mapRecords.size() == 0) { trackingRecord.setHasDiscrepancy(false); continue; } for (MapRecord mapRecord : mapRecords) { if (!mapRecord.getMapProjectId().equals(project.getId())) continue; assignedUsers.add(mapRecord.getOwner()); if (!mapRecord.getWorkflowStatus().equals(WorkflowStatus.CONFLICT_DETECTED)) conflictDetected = false; if (mapRecord.getWorkflowStatus().equals(WorkflowStatus.NEW) || mapRecord.getWorkflowStatus().equals(WorkflowStatus.EDITING_IN_PROGRESS) || mapRecord.getWorkflowStatus().equals(WorkflowStatus.EDITING_DONE)) earlyStage = true; } if (conflictDetected) { trackingRecord.setHasDiscrepancy(true); trackingRecord.setAssignedUsers(assignedUsers); trackingRecord.setMapRecords(new HashSet<>(mapRecords)); } else if (earlyStage) { trackingRecord.setAssignedUsers(assignedUsers); trackingRecord.setMapRecords(new HashSet<>(mapRecords)); } else { throw new Exception("ComputeWorkflow exception."); } } Logger.getLogger(WorkflowServiceJpa.class).info("Done computing workflow"); mappingService.close(); contentService.close(); if (getTransactionPerOperation()) { EntityTransaction tx = manager.getTransaction(); tx.commit(); } } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#getWorkflowTrackingRecord(org.ihtsdo.otf.mapping.model.MapProject, org.ihtsdo.otf.mapping.rf2.Concept) */ @Override public WorkflowTrackingRecord getWorkflowTrackingRecord(MapProject 
project, Concept c) throws Exception { return (WorkflowTrackingRecord) manager.createQuery("select tr from WorkflowTrackingRecordJpa tr " + "where mapProject_id = :mapProjectId " + "and terminologyId = :terminologyId " + "and terminology = :terminology " + "and terminologyVersion = :terminologyVersion") .setParameter("mapProjectId", project.getId()) .setParameter("terminologyId", c.getTerminologyId()) .setParameter("terminology", c.getTerminology()) .setParameter("terminologyVersion", c.getTerminologyVersion()) .getSingleResult(); } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#removeWorkflowTrackingRecord(org.ihtsdo.otf.mapping.model.MapProject, org.ihtsdo.otf.mapping.workflow.WorkflowTrackingRecord) */ @Override public void removeWorkflowTrackingRecord(Long workflowTrackingRecordId) throws Exception { if (getTransactionPerOperation()) { EntityTransaction tx = manager.getTransaction(); tx.begin(); WorkflowTrackingRecord ma = manager.find(WorkflowTrackingRecordJpa.class, workflowTrackingRecordId); if (manager.contains(ma)) { manager.remove(ma); } else { manager.remove(manager.merge(ma)); } tx.commit(); } else { WorkflowTrackingRecord ma = manager.find(WorkflowTrackingRecordJpa.class, workflowTrackingRecordId); if (manager.contains(ma)) { manager.remove(ma); } else { manager.remove(manager.merge(ma)); } } } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#updateWorkflowTrackingRecord(org.ihtsdo.otf.mapping.model.MapProject, org.ihtsdo.otf.mapping.workflow.WorkflowTrackingRecord) */ @Override public void updateWorkflowTrackingRecord(WorkflowTrackingRecord record) throws Exception { if (getTransactionPerOperation()) { EntityTransaction tx = manager.getTransaction(); tx.begin(); manager.merge(record); tx.commit(); // manager.close(); } else { manager.merge(record); } } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#assignUserToConcept(org.ihtsdo.otf.mapping.model.MapProject, 
org.ihtsdo.otf.mapping.rf2.Concept, org.ihtsdo.otf.mapping.model.MapUser) */ @Override public MapRecord assignUserToConcept(MapProject project, Concept concept, MapUser user) throws Exception { /** Creates map record (set owner (user) and workflow status (NEW)) */ MapRecord mapRecord = new MapRecordJpa(); mapRecord.setOwner(user); mapRecord.setConceptId(concept.getTerminologyId()); mapRecord.setMapProjectId(project.getId()); mapRecord.setConceptName(concept.getDefaultPreferredName()); mapRecord.setWorkflowStatus(WorkflowStatus.NEW); mapRecord.setTimestamp(System.currentTimeMillis()); // TODO: need to compute descendants here? mapRecord.setCountDescendantConcepts(0L); mapRecord.setFlagForConsensusReview(false); mapRecord.setFlagForEditorialReview(false); mapRecord.setFlagForMapLeadReview(false); mapRecord.setLastModifiedBy(user); mapRecord.setLastModified(System.currentTimeMillis()); MappingService mappingService = new MappingServiceJpa(); mappingService.addMapRecord(mapRecord); mappingService.close(); /** Get the workflow tracking record for this map project/concept (throw error if not found) */ WorkflowTrackingRecord trackingRecord = getWorkflowTrackingRecord(project, concept); if (trackingRecord == null) throw new Exception("WorkflowTrackingRecord for project: " + project + " concept: " + concept + "was not found."); /** Adds user and map record to workflow tracking record, throw an exception if the user is already in the assignedUsers */ if (trackingRecord.getAssignedUsers().contains(user)) throw new Exception("User " + user.getUserName() + " is already assigned to this tracking record: " + trackingRecord); trackingRecord.addAssignedUser(user); trackingRecord.addMapRecord(mapRecord); if (getTransactionPerOperation()) { EntityTransaction tx = manager.getTransaction(); tx.begin(); manager.persist(trackingRecord); tx.commit(); } else { manager.persist(trackingRecord); } return mapRecord; } /* (non-Javadoc) * @see 
org.ihtsdo.otf.mapping.services.WorkflowService#assignUserToConcept(org.ihtsdo.otf.mapping.model.MapProject, org.ihtsdo.otf.mapping.rf2.Concept, org.ihtsdo.otf.mapping.model.MapRecord, org.ihtsdo.otf.mapping.model.MapUser) */
@Override
public void assignUserToConcept(MapProject project, Concept concept, MapRecord initialRecord, MapUser user) throws Exception {
  // clone initialRecord as starting point
  MapRecord mapRecord = new MapRecordJpa(initialRecord);
  // set id of the cloned record to null (so Hibernate will assign a new one)
  mapRecord.setId(null);
  // add the id of the initial record to the "origin ids" list of the clone
  mapRecord.addOrigin(initialRecord.getId());
  mapRecord.setLastModifiedBy(user);
  mapRecord.setLastModified(System.currentTimeMillis());

  // find the workflowTrackingRecord and add user and record to it
  // NOTE(review): unlike the other overload, no null check is done on the
  // tracking record and no duplicate-assignment check — confirm intended.
  WorkflowTrackingRecord trackingRecord = getWorkflowTrackingRecord(project, concept);
  trackingRecord.addAssignedUser(user);
  trackingRecord.addMapRecord(mapRecord);

  if (getTransactionPerOperation()) {
    EntityTransaction tx = manager.getTransaction();
    tx.begin();
    manager.persist(trackingRecord);
    tx.commit();
  } else {
    manager.persist(trackingRecord);
  }
}

/* (non-Javadoc)
 * @see org.ihtsdo.otf.mapping.services.WorkflowService#getMapRecordsAssignedToUser(org.ihtsdo.otf.mapping.model.MapProject, org.ihtsdo.otf.mapping.model.MapUser)
 */
// TODO Add pfs support to this routine
@Override
public Set<MapRecord> getMapRecordsAssignedToUser(MapProject project, MapUser user) throws Exception {
  Set<MapRecord> mapRecordsAssigned = new HashSet<>();
  // iterate through all workflow tracking records (for unmapped in-scope
  // concepts) and collect map records owned by this user
  for (WorkflowTrackingRecord trackingRecord : getWorkflowTrackingRecords(project)) {
    for (MapRecord mapRecord : trackingRecord.getMapRecords()) {
      if (mapRecord.getOwner().equals(user)) {
        mapRecordsAssigned.add(mapRecord);
      }
    }
  }
  return
mapRecordsAssigned; } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#unassignUserFromConcept(org.ihtsdo.otf.mapping.model.MapProject, org.ihtsdo.otf.mapping.rf2.Concept, org.ihtsdo.otf.mapping.model.MapUser) */ @Override public void unassignUserFromConcept(MapProject project, Concept concept, MapUser user) throws Exception { MappingService mappingService = new MappingServiceJpa(); /** iterate thru tracking records until you find one for the given concept/user combination */ for (WorkflowTrackingRecord trackingRecord : getWorkflowTrackingRecords(project)) { if (trackingRecord.getTerminologyId().equals(concept.getTerminologyId()) && trackingRecord.getAssignedUsers().contains(user)) { /** remove the user and the mapping record from the tracking record and save the tracking record */ trackingRecord.removeAssignedUser(user); // go through all mapRecords whose owner is that user for (MapRecord mapRecord : trackingRecord.getMapRecords()) { if (mapRecord.getOwner().equals(user)) { System.out.println("Removing record"); trackingRecord.removeMapRecord(mapRecord); // update the record updateWorkflowTrackingRecord(trackingRecord); // delete the record mappingService.removeMapRecord(mapRecord.getId()); } } } } mappingService.close(); } /** * Returns the available tracking records for workflow and user. 
* * @param workflowId the workflow id * @param userId the user id * @return the available tracking records for workflow and user */ @SuppressWarnings("unchecked") @Override public List<WorkflowTrackingRecord> getAvailableTrackingRecordsForProjectAndUser(Long mapProjectId, Long userId) { // return workflow tracking records where: // - this user is not in the list of assigned users // - the list of assigned users has 0 or 1 elements // - the workfowId matches the id of this workflow javax.persistence.Query query = manager.createQuery( "SELECT tr FROM WorkflowTrackingRecordJpa tr " + "WHERE NOT EXISTS (from tr.assignedUsers as user where user.id = " + userId.toString() + ") AND size(tr.assignedUsers) < 2 AND mapProject_id = " + mapProjectId.toString()); return query.getResultList(); } // TODO DIscuss model change to have WorkflowTrackingRecords directly connected to Workflow // i.e. WorkflowTrackingRecord->Workflow (analogous to Record->Entry) // this would enable searching and sorting in the hibernate environment // TODO If above is not desirable, consider converting workflow.getTrackingRecords return a sorted list // This would avoid some clumsy manipulation here @Override public SearchResultList findAvailableWork(MapProject mapProject, MapUser mapUser, PfsParameter pfsParameter) { System.out.println("find available work for project " + mapProject.getName() + ", " + mapUser.getId().toString()); // create return object SearchResultList results = new SearchResultListJpa(); List<WorkflowTrackingRecord> trackingRecords = getAvailableTrackingRecordsForProjectAndUser(mapProject.getId(), mapUser.getId()); // sort list of tracking records (see TODO above) Collections.sort( trackingRecords, new Comparator<WorkflowTrackingRecord>() { @Override public int compare(WorkflowTrackingRecord w1, WorkflowTrackingRecord w2) { return w1.getSortKey().compareTo(w2.getSortKey()); } }); // set the total count // TODO This will return erroneous count if records are aleady assigned to this 
user // Need a better way to query for records (see TODO above) results.setTotalCount(new Long(trackingRecords.size())); // paging parameters int startIndex, maxResults; // if paging requested, retrieve parameters if (pfsParameter != null && pfsParameter.getStartIndex() != -1 && pfsParameter.getMaxResults() != -1) { startIndex = pfsParameter.getStartIndex(); maxResults = pfsParameter.getMaxResults(); // else no paging requested, return all tracking records } else { startIndex = 0; maxResults = trackingRecords.size(); } // start at start index, continue until end of list or page size reached for ( int i = startIndex; i < trackingRecords.size() && results.getCount() <= maxResults; i++) { WorkflowTrackingRecord trackingRecord = trackingRecords.get(i); // currently a redundant check if (!trackingRecord.getAssignedUsers().contains(mapUser) && trackingRecord.getAssignedUsers().size() < 2) { SearchResult result = new SearchResultJpa(); result.setId(trackingRecord.getId()); result.setTerminology(trackingRecord.getTerminology()); result.setTerminologyId(trackingRecord.getTerminologyId()); result.setTerminologyVersion(trackingRecord.getTerminologyVersion()); result.setValue(trackingRecord.getDefaultPreferredName()); results.addSearchResult(result); } } // return search results return results; } @Override public Set<WorkflowTrackingRecord> getTrackingRecordsForConflictConcepts(MapProject mapProject) { Set<WorkflowTrackingRecord> conflictRecords = new HashSet<>(); for (WorkflowTrackingRecord trackingRecord : getWorkflowTrackingRecords(mapProject)) { if (trackingRecord.isHasDiscrepancy()) conflictRecords.add(trackingRecord); } return conflictRecords; } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#close() */ @Override public void close() throws Exception { if (manager.isOpen()) { manager.close(); } } /* (non-Javadoc) * @see org.ihtsdo.otf.mapping.services.WorkflowService#getTransactionPerOperation() */ @Override public boolean 
getTransactionPerOperation() throws Exception {
  return transactionPerOperation;
}

/* (non-Javadoc)
 * @see org.ihtsdo.otf.mapping.services.WorkflowService#setTransactionPerOperation(boolean)
 */
@Override
public void setTransactionPerOperation(boolean transactionPerOperation) throws Exception {
  this.transactionPerOperation = transactionPerOperation;
}

/* (non-Javadoc)
 * @see org.ihtsdo.otf.mapping.services.WorkflowService#beginTransaction()
 */
@Override
public void beginTransaction() throws Exception {
  // only legal in manual-transaction mode, and only when no transaction is
  // already active; "tx" is a field declared above this chunk
  if (getTransactionPerOperation())
    throw new IllegalStateException(
        "Error attempting to begin a transaction when using transactions per operation mode.");
  else if (tx != null && tx.isActive())
    throw new IllegalStateException(
        "Error attempting to begin a transaction when there "
        + "is already an active transaction");
  tx = manager.getTransaction();
  tx.begin();
}

/* (non-Javadoc)
 * @see org.ihtsdo.otf.mapping.services.WorkflowService#commit()
 */
@Override
public void commit() throws Exception {
  if (getTransactionPerOperation())
    throw new IllegalStateException(
        "Error attempting to commit a transaction when using transactions per operation mode.");
  else if (tx != null && !tx.isActive())
    throw new IllegalStateException(
        "Error attempting to commit a transaction when there "
        + "is no active transaction");
  tx.commit();
}

@Override
public Map<Long, Long> compareFinishedMapRecords(MapProject mapProject) throws Exception {
  // pairs of finished records on the same concept, to be compared below
  Map<MapRecord, MapRecord> finishedPairsForComparison = new HashMap<MapRecord, MapRecord>();
  // conflicting record-id pairs, returned to the caller
  Map<Long, Long> conflicts = new HashMap<Long, Long>();

  MappingService mappingService = new MappingServiceJpa();
  List<MapRecord> allMapRecords = mappingService.getMapRecordsForMapProject(mapProject.getId());

  // keep only records whose editing is done
  List<MapRecord> finishedMapRecords = new ArrayList<>();
  for (MapRecord mapRecord : allMapRecords) {
    if (mapRecord.getWorkflowStatus().equals(WorkflowStatus.EDITING_DONE))
      finishedMapRecords.add(mapRecord);
  }
  MapRecord[] mapRecords = finishedMapRecords.toArray(new
MapRecord[0]); for (int i=0; i<mapRecords.length; i++) { for (int j=0; j<mapRecords.length; j++) { if (mapRecords[i].getConceptId().equals(mapRecords[j].getConceptId()) && mapRecords[i].getLastModified() < mapRecords[j].getLastModified() && mapRecords[i].getId() != mapRecords[j].getId()) { finishedPairsForComparison.put(mapRecords[i], mapRecords[j]); } } } for (Entry<MapRecord, MapRecord> entry : finishedPairsForComparison.entrySet()) { DefaultProjectSpecificAlgorithmHandler handler = new DefaultProjectSpecificAlgorithmHandler(); ValidationResult result = handler.compareMapRecords(entry.getKey(), entry.getValue()); if (!result.isValid()) { conflicts.put(entry.getKey().getId(), entry.getValue().getId()); entry.getKey().setWorkflowStatus(WorkflowStatus.CONFLICT_DETECTED); mappingService.updateMapRecord(entry.getKey()); entry.getValue().setWorkflowStatus(WorkflowStatus.CONFLICT_DETECTED); mappingService.updateMapRecord(entry.getValue()); } } return conflicts; } @Override public Set<WorkflowTrackingRecord> getTrackingRecordsForUnmappedInScopeConcepts(MapProject mapProject) { Set<WorkflowTrackingRecord> unmappedTrackingRecords = new HashSet<>(); for (WorkflowTrackingRecord trackingRecord : getWorkflowTrackingRecords(mapProject)) { if (!trackingRecord.isHasDiscrepancy()) unmappedTrackingRecords.add(trackingRecord); } return unmappedTrackingRecords; } @Override public SearchResultList findMapRecordsAssignedToUser(MapProject project, MapUser user, PfsParameter pfsParameter) { List<MapRecord> mapRecordsAssigned = new ArrayList<>(); // cycle over all tracking records for (WorkflowTrackingRecord trackingRecord : getWorkflowTrackingRecords(project)) { for (MapRecord mapRecord : trackingRecord.getMapRecords()) { if (mapRecord.getOwner().equals(user)) { mapRecordsAssigned.add(mapRecord); } } } // sort the collection by concept name Collections.sort(mapRecordsAssigned, new Comparator<MapRecord>() { @Override public int compare(MapRecord w1, MapRecord w2) { return 
w1.getConceptName().compareTo(w2.getConceptName()); } }); // construct return list and set the total results SearchResultList searchResults = new SearchResultListJpa(); searchResults.setTotalCount(new Long(mapRecordsAssigned.size())); // calculate the start and end indexes from the pfs parameter int startIndex = pfsParameter.getStartIndex() == -1 ? 0 : pfsParameter.getStartIndex(); int endIndex = pfsParameter.getMaxResults() == -1 ? mapRecordsAssigned.size() : Math.min(mapRecordsAssigned.size(), startIndex + pfsParameter.getMaxResults()); for (MapRecord mapRecord : mapRecordsAssigned.subList(startIndex, endIndex)) { SearchResult searchResult = new SearchResultJpa(); searchResult.setId(mapRecord.getId()); searchResult.setTerminology(project.getSourceTerminology()); searchResult.setTerminologyVersion(project.getSourceTerminologyVersion()); searchResult.setTerminologyId(mapRecord.getConceptId()); searchResult.setValue(mapRecord.getConceptName()); searchResults.addSearchResult(searchResult); } return searchResults; } }
package org.languagetool.rules.de; import org.languagetool.AnalyzedSentence; import org.languagetool.AnalyzedToken; import org.languagetool.AnalyzedTokenReadings; import org.languagetool.JLanguageTool; import org.languagetool.language.German; import org.languagetool.rules.Category; import org.languagetool.rules.Example; import org.languagetool.rules.RuleMatch; import org.languagetool.rules.patterns.PatternToken; import org.languagetool.rules.patterns.PatternTokenBuilder; import org.languagetool.tagging.de.GermanTagger; import org.languagetool.tagging.de.GermanToken; import org.languagetool.tagging.de.GermanToken.POSType; import org.languagetool.tagging.disambiguation.rules.DisambiguationPatternRule; import org.languagetool.tools.StringTools; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.*; import java.util.regex.Pattern; /** * Check that adjectives and verbs are not written with an uppercase * first letter (except at the start of a sentence) and cases * like this: <tt>Das laufen f&auml;llt mir leicht.</tt> (<tt>laufen</tt> needs * to be uppercased). 
* @author Daniel Naber
*/
public class CaseRule extends GermanRule {

  // list-numbering tokens like "a", "12", roman numerals (used by isNumbering)
  private static final Pattern NUMERALS_EN =
      Pattern.compile("[a-z]|[0-9]+|(m{0,4}(cm|cd|d?c{0,3})(xc|xl|l?x{0,3})(ix|iv|v?i{0,3}))$");

  // words like "das"/"sein" that signal a following nominalized verb
  private static final Set<String> nounIndicators = new HashSet<>();

  // also see case_rule_exception.txt:
  private static final List<List<PatternToken>> ANTI_PATTERNS = Arrays.asList(
    Arrays.asList(  // "Vereinigte Staaten", "Vereinigtes Königreich" etc.
      new PatternTokenBuilder().tokenRegex("Vereinigte[ns]?").build(),
      new PatternTokenBuilder().tokenRegex("Staaten|Königreiche?s?").build()
    ),
    Arrays.asList(  // "Hin und Her"
      new PatternTokenBuilder().token("Hin").build(),
      new PatternTokenBuilder().token("und").build(),
      new PatternTokenBuilder().token("Her").build()
    )
  );

  static {
    nounIndicators.add("das");
    nounIndicators.add("sein");
    nounIndicators.add("mein");
    nounIndicators.add("dein");
    nounIndicators.add("euer");
    nounIndicators.add("unser");
  }

  // tokens after which an uppercase word is still considered sentence-initial
  private static final Set<String> sentenceStartExceptions = new HashSet<>();
  static {
    sentenceStartExceptions.add("(");
    sentenceStartExceptions.add(":");
    sentenceStartExceptions.add("\"");
    sentenceStartExceptions.add("'");
    sentenceStartExceptions.add("„");
    sentenceStartExceptions.add("“");
    sentenceStartExceptions.add("«");
    sentenceStartExceptions.add("»");
    sentenceStartExceptions.add(".");
  }

  /*
   * These are words that Morphy only knows as non-nouns (or not at all).
   * The proper solution is to add all those to our Morphy data, but as a simple
   * workaround to avoid false alarms, these words can be added here.
   */
  private static final Set<String> exceptions = new HashSet<>(Arrays.asList(
    "Dienstreisender",
    "Verletzte",
    "Vermisste",
    "Äußeres",
    "Abseits",
    "Beschäftigter",
    "Beschäftigte",
    "Beschäftigten",
    "Üblichen",
    "Bekannter",
    "Bekannte",
    "Tel",  // Tel.
    // = Telefon (continuation of the "Tel." note above)
    "Unschuldiger",
    "Vorgesetzter",
    "Abs",
    "Klappe",
    "Vorfahre",
    "Mittler",
    "Hr",
    "Schwarz",
    "Genese",
    "Rosa",
    "Auftrieb",
    "Zuschnitt",
    "Geschossen",
    "Vortrieb",
    "Abtrieb",
    "Gesandter",
    "Durchfahrt",
    "Durchgriff",
    "Überfahrt",
    "Zeche",
    "Sparte",
    "Sparten",
    "Heiliger",
    "Reisender",
    "Hochdeutsch",
    "Pest",
    "Schwinge",
    "Verlies",
    "Nachfolge",
    "Stift",
    "Belange",
    "Geistlicher",
    "Jenseits",
    "Abends",
    "Abgeordneter",
    "Angestellter",
    "Liberaler",
    "Abriss",
    "Ahne",
    "Ähnlichem",
    "Ähnliches",
    "Allerlei",
    "Anklang",
    "Anstrich",
    "Armes",
    "Aus",  // "vor dem Aus stehen"
    "Ausdrücke",
    "Auswüchsen",
    "Bände",
    "Bänden",
    "Beauftragter",
    "Belange",
    "besonderes",
    "Biss",
    "De",  // "De Morgan" etc
    "Dr",
    "Durcheinander",
    "Eindrücke",
    "Erwachsener",
    "Flöße",
    "Folgendes",
    "Fort",
    "Fraß",
    "Für",
    "Genüge",
    "Gläubiger",
    "Goldener",  // Goldener Schnitt
    "Guten",  // das Kap der Guten Hoffnung
    "Hechte",
    "Herzöge",
    "Herzögen",
    "Hinfahrt",
    "Hundert",
    "Ihnen",
    "Ihr",
    "Ihre",
    "Ihrem",
    "Ihren",
    "Ihrer",
    "Ihres",
    "Infrarot",
    "Jenseits",
    "Jugendlicher",
    "Jünger",
    "Klaue",
    "Konditional",
    "Krähe",
    "Kurzem",
    "Landwirtschaft",
    "Langem",
    "Längerem",
    "Le",  // "Le Monde" etc
    "Letzt",
    "Letzt",  // "zu guter Letzt"
    "Letztere",
    "Letzterer",
    "Letzteres",
    "Link",
    "Links",
    "Löhne",
    "Luden",
    "Mitfahrt",
    "Mr",
    "Mrd",
    "Mrs",
    "Nachfrage",
    "Nachts",  // "des Nachts", "eines Nachts"
    "Nähte",
    "Nähten",
    "Neuem",
    "Nr",
    "Nutze",  // zu Nutze
    "Obdachloser",
    "Oder",  // der Fluss
    "Patsche",
    "Pfiffe",
    "Pfiffen",
    "Prof",
    "Puste",
    "Sachverständiger",
    "Sankt",
    "Scheine",
    "Scheiße",
    "Schuft",
    "Schufte",
    "Schuld",
    "Schwärme",
    "Schwarzes",  // Schwarzes Brett
    "Sie",
    "Spitz",
    "St",  // Paris St.
    // Germain (continuation of the "Paris St." note above)
    "Stereotyp",
    "Störe",
    "Tausend",
    "Toter",
    "tun",
    "Übrigen",
    "Unvorhergesehenes",
    "Verantwortlicher",
    "Verwandter",
    "Vielfaches",
    "Vorsitzender",
    "Fraktionsvorsitzender",
    "Weitem",
    "Weiteres",
    "Wicht",
    "Wichtiges",
    "Wider",
    "Wild",
    "Zeche",
    "Zusage",
    "Zwinge",
    "Tertiär",  // geological period
    "Erster",   // "er wurde Erster im Langlauf"
    "Zweiter",
    "Dritter",
    "Vierter",
    "Fünfter",
    "Sechster",
    "Siebter",
    "Achter",
    "Neunter",
    "Erste",    // "sie wurde Erste im Langlauf"
    "Zweite",
    "Dritte",
    "Vierte",
    "Fünfte",
    "Sechste",
    "Siebte",
    "Achte",
    "Neunte",
    "Dein",
    "Deine",
    "Deinem",
    "Deinen",
    "Deiner",
    "Deines",
    "Dich",
    "Dir",
    "Du",
    "Euch",
    "Euer",
    "Eure",
    "Eurem",
    "Euren",
    "Eures"
  ));

  // language names: uppercase is expected for these, see isLanguage()
  private static final Set<String> languages = new HashSet<>();
  static {
    // TODO: all languages
    languages.add("Afrikanisch");
    languages.add("Altarabisch");
    languages.add("Altchinesisch");
    languages.add("Altgriechisch");
    languages.add("Althochdeutsch");
    languages.add("Altpersisch");
    languages.add("Amerikanisch");
    languages.add("Arabisch");
    languages.add("Chinesisch");
    languages.add("Dänisch");
    languages.add("Deutsch");
    languages.add("Englisch");
    languages.add("Finnisch");
    languages.add("Französisch");
    languages.add("Frühneuhochdeutsch");
    languages.add("Germanisch");
    languages.add("Griechisch");
    languages.add("Hocharabisch");
    languages.add("Hochchinesisch");
    languages.add("Hochdeutsch");
    languages.add("Holländisch");
    languages.add("Italienisch");
    languages.add("Japanisch");
    languages.add("Jiddisch");
    languages.add("Jugoslawisch");
    languages.add("Koreanisch");
    languages.add("Kroatisch");
    languages.add("Lateinisch");
    languages.add("Luxemburgisch");
    languages.add("Mittelhochdeutsch");
    languages.add("Neuhochdeutsch");
    languages.add("Niederländisch");
    languages.add("Norwegisch");
    languages.add("Persisch");
    languages.add("Polnisch");
    languages.add("Portugiesisch");
    languages.add("Russisch");
    languages.add("Schwedisch");
    languages.add("Schweizerisch");
    languages.add("Serbisch");
    languages.add("Serbokroatisch");
    languages.add("Slawisch");
    languages.add("Spanisch");
    languages.add("Tschechisch");
    languages.add("Türkisch");
    languages.add("Ukrainisch");
    languages.add("Ungarisch");
    languages.add("Weißrussisch");
  }

  // multi-word phrases exempt from the rule, loaded from an external list
  private static final Set<String> myExceptionPhrases = CaseRuleExceptions.getExceptions();

  // words after "das" etc. that must NOT be flagged as nominalized verbs
  private static final Set<String> substVerbenExceptions = new HashSet<>();
  static {
    substVerbenExceptions.add("hinziehen");
    substVerbenExceptions.add("helfen");
    substVerbenExceptions.add("lassen");
    substVerbenExceptions.add("passieren");
    substVerbenExceptions.add("machen");  // "Du kannst das machen."
    substVerbenExceptions.add("haben");  // "Das haben schon viele versucht."
    substVerbenExceptions.add("passiert");  // "Das passiert..."
    substVerbenExceptions.add("beschränkt");
    substVerbenExceptions.add("wiederholt");
    substVerbenExceptions.add("scheinen");
    substVerbenExceptions.add("klar");
    substVerbenExceptions.add("heißen");
    substVerbenExceptions.add("einen");
    substVerbenExceptions.add("gehören");
    substVerbenExceptions.add("bedeutet");  // "und das bedeutet..."
    substVerbenExceptions.add("ermöglicht");
    substVerbenExceptions.add("funktioniert");  // "Das funktioniert..."
    substVerbenExceptions.add("sollen");
    substVerbenExceptions.add("werden");
    substVerbenExceptions.add("dürfen");
    substVerbenExceptions.add("müssen");
    substVerbenExceptions.add("so");
    substVerbenExceptions.add("ist");
    substVerbenExceptions.add("können");
    substVerbenExceptions.add("mein");  // "etwas, das mein Interesse geweckt hat"
    substVerbenExceptions.add("sein");
    substVerbenExceptions.add("muss");
    substVerbenExceptions.add("muß");
    substVerbenExceptions.add("wollen");
    substVerbenExceptions.add("habe");
    substVerbenExceptions.add("ein");  // not "einen" (verb)
    substVerbenExceptions.add("tun");  // "...dann wird er das tun."
substVerbenExceptions.add("bestätigt"); substVerbenExceptions.add("bestätigte"); substVerbenExceptions.add("bestätigten"); substVerbenExceptions.add("bekommen"); substVerbenExceptions.add("sauer"); } private final GermanTagger tagger; private final German german; public CaseRule(final ResourceBundle messages, final German german) { this.german = german; super.setCategory(new Category(messages.getString("category_case"))); this.tagger = (GermanTagger) german.getTagger(); addExamplePair(Example.wrong("<marker>Das laufen</marker> fällt mir schwer."), Example.fixed("<marker>Das Laufen</marker> fällt mir schwer.")); } @Override public String getId() { return "DE_CASE"; } @Override public URL getUrl() { try { return new URL("http: } catch (MalformedURLException e) { throw new RuntimeException(e); } } @Override public String getDescription() { return "Großschreibung von Nomen und substantivierten Verben"; } @Override public RuleMatch[] match(final AnalyzedSentence sentence) throws IOException { final List<RuleMatch> ruleMatches = new ArrayList<>(); final AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace(); boolean prevTokenIsDas = false; for (int i = 0; i < tokens.length; i++) { //Note: defaulting to the first analysis is only save if we only query for sentence start final String posToken = tokens[i].getAnalyzedToken(0).getPOSTag(); if (posToken != null && posToken.equals(JLanguageTool.SENTENCE_START_TAGNAME)) { continue; } if (i == 1) { // don't care about first word, UppercaseSentenceStartRule does this already if (nounIndicators.contains(tokens[1].getToken().toLowerCase())) { prevTokenIsDas = true; } continue; } if (i > 0 && isSalutation(tokens[i-1].getToken())) { // e.g. 
        // "Frau Stieg" could be a name, ignore
        continue;
      }
      final AnalyzedTokenReadings analyzedToken = tokens[i];
      final String token = analyzedToken.getToken();
      markLowerCaseNounErrors(ruleMatches, tokens, i, analyzedToken);
      // a token counts as baseform when it has readings and is its own lemma
      boolean isBaseform = analyzedToken.getReadingsLength() >= 1 && analyzedToken.hasLemma(token);
      if ((analyzedToken.getAnalyzedToken(0).getPOSTag() == null
          || GermanHelper.hasReadingOfType(analyzedToken, GermanToken.POSType.VERB)) && isBaseform) {
        boolean nextTokenIsPersonalPronoun = false;
        if (i < tokens.length - 1) {
          // avoid false alarm for "Das haben wir getan." etc:
          nextTokenIsPersonalPronoun = tokens[i + 1].hasPartialPosTag("PRO:PER") || tokens[i + 1].getToken().equals("Sie");
          if (tokens[i + 1].hasLemma("lassen")) {
            // avoid false alarm for "Ihr sollt mich das wissen lassen."
            continue;
          }
          if (tokens[i + 1].isSentenceEnd()) {
            // avoid false alarm for "So sollte das funktionieren." (might also remove true alarms...)
            continue;
          }
        }
        if (isPrevProbablyRelativePronoun(tokens, i)) {
          continue;
        }
        potentiallyAddLowercaseMatch(ruleMatches, tokens[i], prevTokenIsDas, token, nextTokenIsPersonalPronoun);
      }
      prevTokenIsDas = nounIndicators.contains(tokens[i].getToken().toLowerCase());
      if (hasNounReading(analyzedToken)) {
        // it's the spell checker's task to check that nouns are uppercase
        continue;
      }
      // look up the lowercase variant to decide whether the word is known
      AnalyzedTokenReadings lowercaseReadings = tagger.lookup(token.toLowerCase());
      if (analyzedToken.getAnalyzedToken(0).getPOSTag() == null && lowercaseReadings == null) {
        continue;
      }
      if (analyzedToken.getAnalyzedToken(0).getPOSTag() == null && lowercaseReadings != null
          && lowercaseReadings.getAnalyzedToken(0).getPOSTag() == null) {
        continue;  // unknown word, probably a name etc
      }
      potentiallyAddUppercaseMatch(ruleMatches, tokens, i, analyzedToken, token);
    }
    return toRuleMatchArray(ruleMatches);
  }

  @Override
  public List<DisambiguationPatternRule> getAntiPatterns() {
    return makeAntiPatterns(ANTI_PATTERNS, german);
  }

  private void markLowerCaseNounErrors(List<RuleMatch> ruleMatches,
      AnalyzedTokenReadings[] tokens, int i, AnalyzedTokenReadings analyzedToken) throws IOException {
    // intentionally empty — commented out, too many false alarms...
  }

  // e.g. "Ein Kaninchen, das zaubern kann" - avoid false alarm here
  private boolean isPrevProbablyRelativePronoun(AnalyzedTokenReadings[] tokens, int i) {
    if (i >= 3) {
      AnalyzedTokenReadings prev1 = tokens[i-1];
      AnalyzedTokenReadings prev2 = tokens[i-2];
      AnalyzedTokenReadings prev3 = tokens[i-3];
      // pattern: <neuter singular noun> , das <verb>
      if (prev1.getToken().equals("das")
          && prev2.getToken().equals(",")
          && prev3.matchesPosTagRegex("SUB:...:SIN:NEU")) {
        return true;
      }
    }
    return false;
  }

  private boolean isSalutation(String token) {
    return token.equals("Herr") || token.equals("Herrn") || token.equals("Frau");
  }

  /** Returns true if the word has at least one noun (SUB, non-ADJ) reading. */
  private boolean hasNounReading(AnalyzedTokenReadings readings) {
    try {
      // unification in disambiguation.xml removes readings, so look up again
      AnalyzedTokenReadings allReadings = tagger.lookup(readings.getToken());
      if (allReadings != null) {
        for (AnalyzedToken reading : allReadings) {
          String posTag = reading.getPOSTag();
          if (posTag != null && posTag.contains("SUB:") && !posTag.contains(":ADJ")) {
            return true;
          }
        }
      }
    } catch (IOException e) {
      throw new RuntimeException("Could not lookup " + readings.getToken(), e);
    }
    return false;
  }

  /** Flags a lowercase nominalized verb after "das" etc. (suggests uppercase). */
  private void potentiallyAddLowercaseMatch(List<RuleMatch> ruleMatches, AnalyzedTokenReadings tokenReadings,
      boolean prevTokenIsDas, String token, boolean nextTokenIsPersonalPronoun) {
    if (prevTokenIsDas && !nextTokenIsPersonalPronoun) {
      // e.g.
      // essen -> Essen
      if (Character.isLowerCase(token.charAt(0))
          && !substVerbenExceptions.contains(token)
          && tokenReadings.hasPartialPosTag("VER:INF")
          && !tokenReadings.isIgnoredBySpeller()
          && !tokenReadings.isImmunized()) {
        final String msg = "Substantivierte Verben werden großgeschrieben.";
        final RuleMatch ruleMatch = new RuleMatch(this, tokenReadings.getStartPos(), tokenReadings.getEndPos(), msg);
        final String word = tokenReadings.getToken();
        final String fixedWord = StringTools.uppercaseFirstChar(word);
        ruleMatch.setSuggestedReplacement(fixedWord);
        ruleMatches.add(ruleMatch);
      }
    }
  }

  /**
   * Flags an uppercase non-noun (suggests lowercase) unless any of the many
   * exception heuristics below applies.
   */
  private void potentiallyAddUppercaseMatch(List<RuleMatch> ruleMatches, AnalyzedTokenReadings[] tokens,
      int i, AnalyzedTokenReadings analyzedToken, String token) {
    if (Character.isUpperCase(token.charAt(0))
        && token.length() > 1  // length limit = ignore abbreviations
        && !tokens[i].isIgnoredBySpeller()
        && !tokens[i].isImmunized()
        && !sentenceStartExceptions.contains(tokens[i - 1].getToken())
        && !exceptions.contains(token)
        && !StringTools.isAllUppercase(token)
        && !isLanguage(i, tokens)
        && !isProbablyCity(i, tokens)
        && !GermanHelper.hasReadingOfType(analyzedToken, POSType.PROPER_NOUN)
        && !analyzedToken.isSentenceEnd()
        && !isEllipsis(i, tokens)
        && !isNumbering(i, tokens)
        && !isNominalization(i, tokens)
        && !isAdverbAndNominalization(i, tokens)
        && !isSpecialCase(i, tokens)
        && !isAdjectiveAsNoun(i, tokens)
        && !isExceptionPhrase(i, tokens)) {
      final String msg = "Außer am Satzanfang werden nur Nomen und Eigennamen großgeschrieben";
      final RuleMatch ruleMatch = new RuleMatch(this, tokens[i].getStartPos(), tokens[i].getEndPos(), msg);
      final String word = tokens[i].getToken();
      final String fixedWord = Character.toLowerCase(word.charAt(0)) + word.substring(1);
      ruleMatch.setSuggestedReplacement(fixedWord);
      ruleMatches.add(ruleMatch);
    }
  }

  // e.g.
  // "a) bla bla" — list numbering followed by ")" or "]"
  private boolean isNumbering(int i, AnalyzedTokenReadings[] tokens) {
    return i >= 2
        && (tokens[i-1].getToken().equals(")") || tokens[i-1].getToken().equals("]"))
        && NUMERALS_EN.matcher(tokens[i-2].getToken()).matches();
  }

  // NOTE(review): the fixed offsets (i == 4 / i == 6) presumably match
  // "[…]" and "[...]" at sentence start — confirm intent
  private boolean isEllipsis(int i, AnalyzedTokenReadings[] tokens) {
    return (tokens[i-1].getToken().equals("]") || tokens[i-1].getToken().equals(")"))
        && ((i == 4 && tokens[i-2].getToken().equals("…")) || (i == 6 && tokens[i-2].getToken().equals(".")));
  }

  /** Detects nominalized adjectives/participles, e.g. "das Wichtige". */
  private boolean isNominalization(int i, AnalyzedTokenReadings[] tokens) {
    String token = tokens[i].getToken();
    AnalyzedTokenReadings nextReadings = i < tokens.length-1 ? tokens[i+1] : null;
    // TODO: we don't detect the error in "Die moderne Wissenschaftlich"
    // because not all (comment truncated in original)
    if (StringTools.startsWithUppercase(token) && !isNumber(token) && !hasNounReading(nextReadings) && !token.matches("Alle[nm]")) {
      AnalyzedTokenReadings prevToken = i > 0 ? tokens[i-1] : null;
      AnalyzedTokenReadings prevPrevToken = i >= 2 ? tokens[i-2] : null;
      AnalyzedTokenReadings prevPrevPrevToken = i >= 3 ? tokens[i-3] : null;
      String prevTokenStr = prevToken != null ? prevToken.getToken() : "";
      if (prevToken != null && ("und".equals(prevTokenStr) || "oder".equals(prevTokenStr))) {
        if (prevPrevToken != null && tokens[i].hasPartialPosTag("SUB") && tokens[i].hasPartialPosTag(":ADJ")) {
          // "das dabei Erlernte und Erlebte ist ..."
          // -> 'Erlebte' is correct here
          return true;
        }
      }
      // nominalization is likely when preceded by pronoun/preposition chains,
      // possibly with intervening adjectives/adverbs/participles
      return (prevToken != null && ("irgendwas".equals(prevTokenStr) || "aufs".equals(prevTokenStr) || "als".equals(prevTokenStr)))
          || hasPartialTag(prevToken, "PRO")
          || (hasPartialTag(prevPrevToken, "PRO", "PRP") && hasPartialTag(prevToken, "ADJ", "ADV", "PA2"))
          || (hasPartialTag(prevPrevPrevToken, "PRO", "PRP") && hasPartialTag(prevPrevToken, "ADJ", "ADV") && hasPartialTag(prevToken, "ADJ", "ADV", "PA2"));
    }
    return false;
  }

  /** Returns true if the lowercase variant of the token is a numeral ("ZAL"). */
  private boolean isNumber(String token) {
    try {
      AnalyzedTokenReadings lookup = tagger.lookup(StringTools.lowercaseFirstChar(token));
      return lookup != null && lookup.hasPosTag("ZAL");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  private boolean isAdverbAndNominalization(int i, AnalyzedTokenReadings[] tokens) {
    String prevPrevToken = i > 1 ? tokens[i-2].getToken() : "";
    AnalyzedTokenReadings prevToken = i > 0 ? tokens[i-1] : null;
    String token = tokens[i].getToken();
    AnalyzedTokenReadings nextReadings = i < tokens.length-1 ? tokens[i+1] : null;
    // ignore "das wirklich Wichtige":
    return "das".equalsIgnoreCase(prevPrevToken) && hasPartialTag(prevToken, "ADV")
        && StringTools.startsWithUppercase(token) && !hasNounReading(nextReadings);
  }

  /** True if the token has any of the given partial POS tags. */
  private boolean hasPartialTag(AnalyzedTokenReadings token, String... posTags) {
    if (token != null) {
      for (String posTag : posTags) {
        if (token.hasPartialPosTag(posTag)) {
          return true;
        }
      }
    }
    return false;
  }

  private boolean isSpecialCase(int i, AnalyzedTokenReadings[] tokens) {
    // NOTE(review): guard is "i > 1" while sibling methods use "i > 0" —
    // probably intentional (i == 1 is handled earlier in match()), confirm
    String prevToken = i > 1 ? tokens[i-1].getToken() : "";
    String token = tokens[i].getToken();
    AnalyzedTokenReadings nextReadings = i < tokens.length-1 ? tokens[i+1] : null;
    // ignore "im Allgemeinen gilt" but not "im Allgemeinen Fall":
    return "im".equalsIgnoreCase(prevToken) && "Allgemeinen".equals(token) && !hasNounReading(nextReadings);
  }

  /** Detects an adjective used as a noun after a determiner/preposition. */
  private boolean isAdjectiveAsNoun(int i, AnalyzedTokenReadings[] tokens) {
    AnalyzedTokenReadings prevToken = i > 0 ?
        tokens[i-1] : null;
    // determiner ("ART") or preposition ("PRP") before the candidate word
    boolean isPrevDeterminer = prevToken != null && (prevToken.hasPartialPosTag("ART") || prevToken.hasPartialPosTag("PRP"));
    if (!isPrevDeterminer) {
      return false;
    }
    AnalyzedTokenReadings nextReadings = i < tokens.length-1 ? tokens[i+1] : null;
    for (AnalyzedToken reading : tokens[i].getReadings()) {
      String posTag = reading.getPOSTag();
      // adjective reading and no noun follows -> adjective used as noun
      if (posTag != null && posTag.contains(":ADJ") && !hasNounReading(nextReadings)) {
        return true;
      }
    }
    return false;
  }

  /** True when the token is (an inflected form of) a language name. */
  private boolean isLanguage(int i, AnalyzedTokenReadings[] tokens) {
    String token = tokens[i].getToken();
    boolean maybeLanguage = languages.contains(token)
        || languages.contains(token.replaceFirst("e$", ""))     // e.g. "Japanische"
        || languages.contains(token.replaceFirst("en$", ""));   // e.g. "im Japanischen"
    AnalyzedTokenReadings prevToken = i > 0 ? tokens[i-1] : null;
    AnalyzedTokenReadings nextReadings = i < tokens.length-1 ? tokens[i+1] : null;
    return maybeLanguage && ((nextReadings != null && !hasNounReading(nextReadings))
        || (prevToken != null && prevToken.getToken().equals("auf")));
  }

  /** True for city-name prefixes like "Klein Wanzleben" (next token a name). */
  private boolean isProbablyCity(int i, AnalyzedTokenReadings[] tokens) {
    String token = tokens[i].getToken();
    boolean hasCityPrefix = "Klein".equals(token) || "Groß".equals(token) || "Neu".equals(token);
    if (hasCityPrefix) {
      AnalyzedTokenReadings nextReadings = i < tokens.length-1 ?
tokens[i+1] : null; return nextReadings != null && (!nextReadings.isTagged() || nextReadings.hasPartialPosTag("EIG")); } return false; } private boolean isExceptionPhrase(int i, AnalyzedTokenReadings[] tokens) { for (String phrase : myExceptionPhrases) { final String[] parts = phrase.split(" "); for (int j = 0; j < parts.length; j++) { if (tokens[i].getToken().matches(parts[j])) { final int startIndex = i-j; if (compareLists(tokens, startIndex, startIndex+parts.length-1, parts)) { return true; } } } } return false; } // non-private for tests boolean compareLists(AnalyzedTokenReadings[] tokens, int startIndex, int endIndex, String[] parts) { if (startIndex < 0) { return false; } int i = 0; for (int j = startIndex; j <= endIndex; j++) { if (i >= parts.length || j >= tokens.length) { return false; } if (!tokens[j].getToken().matches(parts[i])) { return false; } i++; } return true; } @Override public void reset() { // nothing } }
package org.languagetool;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import junit.framework.TestCase;

import org.languagetool.JLanguageTool.ParagraphHandling;
import org.languagetool.language.AmericanEnglish;
import org.languagetool.language.BritishEnglish;
import org.languagetool.language.English;
import org.languagetool.rules.Category;
import org.languagetool.rules.Rule;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.patterns.Element;
import org.languagetool.rules.patterns.PatternRule;

/**
 * Tests of the high-level {@link JLanguageTool} API: rule checking,
 * match positions, sentence analysis, paragraph handling and
 * overlapping-match filtering.
 */
public class JLanguageToolTest extends TestCase {

  // Example code shown on the homepage; intentionally not named "test*"
  // so JUnit does not pick it up.
  public void demoCodeForHomepage() throws IOException {
    JLanguageTool langTool = new JLanguageTool(new BritishEnglish());
    langTool.activateDefaultPatternRules();
    List<RuleMatch> matches = langTool.check("A sentence " +
        "with a error in the Hitchhiker's Guide tot he Galaxy");
    for (RuleMatch match : matches) {
      System.out.println("Potential error at line " +
          match.getLine() + ", column " +
          match.getColumn() + ": " + match.getMessage());
      System.out.println("Suggested correction: " +
          match.getSuggestedReplacements());
    }
  }

  // Spell-checker-only example code shown on the homepage; not a test.
  public void spellCheckerDemoCodeForHomepage() throws IOException {
    JLanguageTool langTool = new JLanguageTool(new BritishEnglish());
    // Disable everything except the dictionary-based spelling rules.
    for (Rule rule : langTool.getAllRules()) {
      if (!rule.isDictionaryBasedSpellingRule()) {
        langTool.disableRule(rule.getId());
      }
    }
    List<RuleMatch> matches = langTool.check("A speling error");
    for (RuleMatch match : matches) {
      System.out.println("Potential typo at line " +
          match.getLine() + ", column " +
          match.getColumn() + ": " + match.getMessage());
      System.out.println("Suggested correction(s): " +
          match.getSuggestedReplacements());
    }
  }

  // End-to-end check of the built-in (non-pattern) English rules, then of
  // the pattern rules after activation, and of rule/category disabling.
  public void testEnglish() throws IOException {
    final JLanguageTool tool = new JLanguageTool(new English());
    assertEquals(0, tool.check("A test that should not give errors.").size());
    //more error-free sentences to deal with possible regressions
    assertEquals(0, tool.check("As long as you have hope, a chance remains.").size());
    assertEquals(0, tool.check("A rolling stone gathers no moss.").size());
    assertEquals(0, tool.check("Hard work causes fitness.").size());
    assertEquals(0, tool.check("Gershwin overlays the slow blues theme from section B in the final “Grandioso.”").size());
    assertEquals(0, tool.check("Making ingroup membership more noticeable increases cooperativeness.").size());
    assertEquals(0, tool.check("Dog mushing is more of a sport than a true means of transportation.").size());
    assertEquals(0, tool.check("No one trusts him any more.").size());
    assertEquals(0, tool.check("A member of the United Nations since 1992, Azerbaijan was elected to membership in the newly established Human Rights Council by the United Nations General Assembly on May 9, 2006 (the term of office began on June 19, 2006).").size());
    assertEquals(0, tool.check("Anatomy and geometry are fused in one, and each does something to the other.").size());
    assertEquals(0, tool.check("Certain frogs that lay eggs underground have unpigmented eggs.").size());
    assertEquals(0, tool.check("It's a kind of agreement in which each party gives something to the other, Jack said.").size());
    assertEquals(0, tool.check("Later, you shall know it better.").size());
    assertEquals(0, tool.check("And the few must win what the many lose, for the opposite arrangement would not support markets as we know them at all, and is, in fact, unimaginable.").size());
    assertEquals(0, tool.check("He explained his errand, but without bothering much to make it plausible, for he felt something well up in him which was the reason why he had fled the army.").size());
    assertEquals(0, tool.check("I think it's better, and it's not a big deal.").size());
    assertEquals(1, tool.check("A test test that should give errors.").size());
    // Without pattern rules this error is not found yet:
    assertEquals(0, tool.check("I can give you more a detailed description.").size());
    assertEquals(12, tool.getAllRules().size());
    tool.activateDefaultPatternRules();
    assertEquals(0, tool.check("The sea ice is highly variable - frozen solid during cold, calm weather and broke...").size());
    assertTrue(tool.getAllRules().size() > 3);
    // With pattern rules active the same sentence now matches MORE_A_JJ:
    assertEquals(1, tool.check("I can give you more a detailed description.").size());
    tool.disableRule("MORE_A_JJ");
    assertEquals(0, tool.check("I can give you more a detailed description.").size());
    assertEquals(1, tool.check("I've go to go.").size());
    tool.disableCategory("Possible Typo");
    assertEquals(0, tool.check("I've go to go.").size());
  }

  // Line/column reporting for a match in the second line of the input.
  public void testPositionsWithEnglish() throws IOException {
    final JLanguageTool tool = new JLanguageTool(new AmericanEnglish());
    final List<RuleMatch> matches = tool.check("A sentence with no period\n" +
        "A sentence. A typoh.");
    assertEquals(1, matches.size());
    final RuleMatch match = matches.get(0);
    assertEquals(1, match.getLine());
    assertEquals(15, match.getColumn());
  }

  // Same as above but with a blank line (paragraph break) before the typo.
  public void testPositionsWithEnglishTwoLineBreaks() throws IOException {
    final JLanguageTool tool = new JLanguageTool(new AmericanEnglish());
    final List<RuleMatch> matches = tool.check("This sentence.\n\n" +
        "A sentence. A typoh.");
    assertEquals(1, matches.size());
    final RuleMatch match = matches.get(0);
    assertEquals(2, match.getLine());
    assertEquals(14, match.getColumn());   // TODO: should actually be 15, as in testPositionsWithEnglish()
  }

  // Verifies the string form of an analyzed sentence, incl. soft-hyphen
  // handling and the paragraph-end marker.
  public void testAnalyzedSentence() throws IOException {
    final JLanguageTool tool = new JLanguageTool(new English());
    //test soft-hyphen ignoring:
    assertEquals("<S> This[this/DT,B-NP-singular|E-NP-singular] " +
        "is[be/VBZ,B-VP] a[a/DT,B-NP-singular] " +
        "test­ed[tested/JJ,test/VBD,test/VBN,test­ed/null,I-NP-singular] " +
        "sentence[sentence/NN,E-NP-singular].[./.,</S>,O]",
        tool.getAnalyzedSentence("This is a test\u00aded sentence.").toString());
    //test paragraph ends adding
    assertEquals("<S> </S><P/> ", tool.getAnalyzedSentence("\n").toString());
  }

  // Checks ParagraphHandling modes: normal, sentence-only and paragraph-only.
  public void testParagraphRules() throws IOException {
    final JLanguageTool tool = new JLanguageTool(new English());
    //run normally
    List<RuleMatch> matches = tool.check("(This is an quote.\n It ends in the second sentence.");
    assertEquals(2, matches.size());
    assertEquals(2, tool.getSentenceCount());
    //run in a sentence-only mode
    matches = tool.check("(This is an quote.\n It ends in the second sentence.", false, ParagraphHandling.ONLYNONPARA);
    assertEquals(1, matches.size());
    assertEquals("EN_A_VS_AN", matches.get(0).getRule().getId());
    assertEquals(1, tool.getSentenceCount());
    //run in a paragraph mode - single sentence
    matches = tool.check("(This is an quote.\n It ends in the second sentence.", false, ParagraphHandling.ONLYPARA);
    assertEquals(1, matches.size());
    assertEquals("EN_UNPAIRED_BRACKETS", matches.get(0).getRule().getId());
    assertEquals(1, tool.getSentenceCount());
    //run in a paragraph mode - many sentences
    matches = tool.check("(This is an quote.\n It ends in the second sentence.", true, ParagraphHandling.ONLYPARA);
    assertEquals(1, matches.size());
    assertEquals("EN_UNPAIRED_BRACKETS", matches.get(0).getRule().getId());
    assertEquals(2, tool.getSentenceCount());
  }

  // Whitespace-before flags must survive the raw -> analyzed transformation.
  public void testWhitespace() throws IOException {
    final JLanguageTool tool = new JLanguageTool(new English());
    final AnalyzedSentence raw = tool.getRawAnalyzedSentence("Let's do a \"test\", do you understand?");
    final AnalyzedSentence cooked = tool.getAnalyzedSentence("Let's do a \"test\", do you understand?");
    //test if there was a change
    assertFalse(raw.equals(cooked));
    //see if nothing has been deleted
    assertEquals(raw.getTokens().length, cooked.getTokens().length);
    int i = 0;
    for (final AnalyzedTokenReadings atr : raw.getTokens()) {
      assertEquals(atr.isWhitespaceBefore(),
          cooked.getTokens()[i].isWhitespaceBefore());
      i++;
    }
  }

  // Two rules with the same id but different sub-ids matching the same text:
  // only one match may survive the overlap filter, both via check() and via
  // checkAnalyzedSentence().
  public void testOverlapFilter() throws IOException {
    final Category category = new Category("test category");
    final List<Element> elements1 = Arrays.asList(new Element("one", true, false, false));
    final PatternRule rule1 = new PatternRule("id1", new English(), elements1, "desc1", "msg1", "shortMsg1");
    rule1.setSubId("1");
    rule1.setCategory(category);
    final List<Element> elements2 = Arrays.asList(new Element("one", true, false, false), new Element("two", true, false, false));
    final PatternRule rule2 = new PatternRule("id1", new English(), elements2, "desc2", "msg2", "shortMsg2");
    rule2.setSubId("2");
    rule2.setCategory(category);
    final JLanguageTool tool = new JLanguageTool(new English());
    tool.addRule(rule1);
    tool.addRule(rule2);
    final List<RuleMatch> ruleMatches1 = tool.check("And one two three.");
    assertEquals("one overlapping rule must be filtered out", 1, ruleMatches1.size());
    assertEquals("msg1", ruleMatches1.get(0).getMessage());
    final String sentence = "And one two three.";
    final AnalyzedSentence analyzedSentence = tool.getAnalyzedSentence(sentence);
    final List<Rule> bothRules = new ArrayList<Rule>(Arrays.asList(rule1, rule2));
    final List<RuleMatch> ruleMatches2 = tool.checkAnalyzedSentence(ParagraphHandling.NORMAL, bothRules, 0, 0, 0, sentence, analyzedSentence);
    assertEquals("one overlapping rule must be filtered out", 1, ruleMatches2.size());
    assertEquals("msg1", ruleMatches2.get(0).getMessage());
  }
}
package com.matthewtamlin.spyglass.library.meta_annotations;

import com.matthewtamlin.spyglass.library.default_processors.DefaultProcessor;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Meta-annotation for annotation types. Associates the annotated annotation
 * with a {@link DefaultProcessor} implementation. Retained at runtime so the
 * association can be read reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.ANNOTATION_TYPE)
public @interface Default {
	/** The processor class associated with the annotated annotation. */
	Class<? extends DefaultProcessor> processorClass();
}
package org.eluder.logback.ext.core;

import ch.qos.logback.core.Context;
import ch.qos.logback.core.Layout;
import ch.qos.logback.core.UnsynchronizedAppenderBase;
import ch.qos.logback.core.encoder.Encoder;
import ch.qos.logback.core.encoder.LayoutWrappingEncoder;

import com.google.common.io.BaseEncoding;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.locks.ReentrantLock;

import static java.lang.String.format;

/**
 * Base appender that encodes each event into an in-memory buffer and hands
 * the result to subclasses as a single string via {@link #handle(Object, String)}.
 * In binary mode the encoded bytes are Base64 encoded; otherwise they are
 * decoded with the configured charset (UTF-8 by default). Encoding is
 * serialized through a fair lock; any failure in the encode/handle pipeline
 * stops the appender by clearing {@code started}.
 *
 * @param <E> the event type accepted by this appender
 */
public abstract class EncodingStringAppender<E> extends UnsynchronizedAppenderBase<E> {

    protected final ReentrantLock lock = new ReentrantLock(true);

    // StandardCharsets.UTF_8 instead of Charset.forName("UTF-8"): no lookup,
    // no possibility of an UnsupportedCharsetException.
    private Charset charset = StandardCharsets.UTF_8;
    private boolean binary;
    private Encoder<E> encoder;

    /**
     * Sets the charset used to turn encoded bytes into a string and
     * propagates it to encoders that support a charset of their own.
     */
    public final void setCharset(Charset charset) {
        if (encoder instanceof LayoutWrappingEncoder) {
            // Wildcard cast instead of a raw type; setCharset does not
            // involve the encoder's type parameter.
            ((LayoutWrappingEncoder<?>) encoder).setCharset(charset);
        } else if (encoder instanceof CharacterEncoder) {
            ((CharacterEncoder<?>) encoder).setCharset(charset);
        }
        this.charset = charset;
    }

    /** Enables Base64 conversion of the encoded payload. */
    public final void setBinary(boolean binary) {
        if (binary) {
            addInfo(format("Appender '%s' is set to binary mode, events are converted to Base64 strings", getName()));
        }
        this.binary = binary;
    }

    /** Sets the encoder and re-applies context and charset to it. */
    public final void setEncoder(Encoder<E> encoder) {
        this.encoder = encoder;
        setContext(context);
        setCharset(charset);
    }

    /** Convenience: wraps the given layout in a {@link LayoutWrappingEncoder}. */
    public final void setLayout(Layout<E> layout) {
        LayoutWrappingEncoder<E> enc = new LayoutWrappingEncoder<E>();
        enc.setLayout(layout);
        setEncoder(enc);
    }

    @Override
    public void setContext(Context context) {
        // Keep the encoder's context in sync with the appender's.
        if (encoder != null) {
            encoder.setContext(context);
        }
        super.setContext(context);
    }

    protected final Charset getCharset() {
        return charset;
    }

    protected final boolean isBinary() {
        return binary;
    }

    protected final Encoder<E> getEncoder() {
        return encoder;
    }

    @Override
    public void start() {
        if (encoder == null) {
            addError(format("Encoder not set for appender '%s'", getName()));
            return;
        }
        lock.lock();
        try {
            encoder.start();
            super.start();
        } finally {
            lock.unlock();
        }
    }

    @Override
    public void stop() {
        lock.lock();
        try {
            super.stop();
            if (encoder != null) {
                encoder.stop();
            }
        } finally {
            lock.unlock();
        }
    }

    @Override
    protected void append(E event) {
        // A fresh buffer per event; only the encoder interaction is locked.
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        encode(event, stream);
        doHandle(event, convert(stream.toByteArray()));
    }

    private void encode(E event, ByteArrayOutputStream stream) {
        lock.lock();
        try {
            encoderInit(stream);
            try {
                doEncode(event);
            } finally {
                encoderClose();
            }
        } finally {
            lock.unlock();
        }
    }

    /** Delivers the encoded event; implemented by concrete appenders. */
    protected abstract void handle(E event, String encoded) throws Exception;

    /** Converts the encoded bytes to Base64 (binary mode) or charset text. */
    protected String convert(byte[] payload) {
        if (binary) {
            return BaseEncoding.base64().encode(payload);
        } else {
            return new String(payload, charset);
        }
    }

    protected void doHandle(E event, String encoded) {
        try {
            if (encoded != null) {
                handle(event, encoded);
            }
        } catch (Exception ex) {
            // A failing handler stops the appender rather than looping on errors.
            this.started = false;
            addError(format("Failed to handle logging event for '%s'", getName()), ex);
        }
    }

    protected void doEncode(E event) {
        try {
            encoder.doEncode(event);
        } catch (IOException ex) {
            this.started = false;
            addError(format("Failed to encode logging event for appender '%s'", getName()), ex);
        }
    }

    protected void encoderInit(ByteArrayOutputStream stream) {
        try {
            encoder.init(stream);
        } catch (IOException ex) {
            this.started = false;
            addError(format("Failed to initialize encoder for appender '%s'", getName()), ex);
        }
    }

    protected void encoderClose() {
        try {
            encoder.close();
        } catch (Exception ex) {
            this.started = false;
            addError(format("Failed to close encoder for appender '%s'", getName()), ex);
        }
    }
}
package com.intellibins.recycle;

import com.intellibins.recycle.activity.MapsActivity;
import com.intellibins.recycle.activity.OnboardingActivity;
import com.intellibins.recycle.activity.SplashActivity;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowActivity;

import android.app.Activity;
import android.content.ComponentName;
import android.graphics.drawable.ColorDrawable;

import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;

/**
 * Robolectric tests for {@code SplashActivity}: activity creation, splash
 * background color, and which activity is launched next.
 */
@Config(emulateSdk = 18)
@RunWith(RobolectricTestRunner.class)
public class SplashActivityRobolectricTest {

    private Activity mActivity;

    @Before
    public void setup() {
        mActivity = Robolectric.setupActivity(SplashActivity.class);
    }

    /** Sanity check that the activity can be created at all. */
    @Test
    public void testActivityFound() {
        assertNotNull(mActivity);
    }

    /** The splash layout background must use the primary color resource. */
    @Test
    public void testSplashScreenBackgroundColor() throws Exception {
        int color = ((ColorDrawable) mActivity
                .findViewById(R.id.layout_splash)
                .getBackground())
                .getColor();
        int primary = mActivity.getResources().getColor(R.color.primary);
        assertThat(color, equalTo(primary));
    }

    /**
     * The next activity depends on the first-run flag: onboarding on first
     * run, otherwise the map screen.
     */
    @Test
    public void testNextStartedActivity() {
        boolean firstRun = SharedPreferencesHelperFactory.get()
                .isFirstRun(Robolectric.application.getApplicationContext());
        Class<?> clazz = firstRun ? OnboardingActivity.class : MapsActivity.class;
        ShadowActivity shadowActivity = Robolectric.shadowOf(mActivity);
        assertThat(shadowActivity.peekNextStartedActivity().getComponent(),
                equalTo(new ComponentName(mActivity, clazz)));
    }
}
package com.namelessmc.namelessplugin.sponge;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

import org.slf4j.Logger;
import org.spongepowered.api.Game;
import org.spongepowered.api.Server;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.command.CommandManager;
import org.spongepowered.api.command.args.GenericArguments;
import org.spongepowered.api.command.spec.CommandSpec;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.game.state.GameInitializationEvent;
import org.spongepowered.api.event.game.state.GameStoppingEvent;
import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;

import com.google.inject.Inject;
import com.namelessmc.namelessplugin.sponge.commands.GetUserCommand;
import com.namelessmc.namelessplugin.sponge.commands.RegisterCommand;
import com.namelessmc.namelessplugin.sponge.commands.ReportCommand;
import com.namelessmc.namelessplugin.sponge.mcstats.Metrics;
import com.namelessmc.namelessplugin.sponge.utils.PluginInfo;

import ninja.leaping.configurate.commented.CommentedConfigurationNode;
import ninja.leaping.configurate.hocon.HoconConfigurationLoader;
import ninja.leaping.configurate.loader.ConfigurationLoader;

/*
 * Sponge Version by Lmmb74
 */
/**
 * Main plugin class: loads the HOCON configuration, starts Metrics and
 * registers the getuser/register/report commands on game initialization.
 */
@Plugin(id = PluginInfo.ID, name = PluginInfo.NAME, version = PluginInfo.VERSION)
public class NamelessPlugin {

	private static NamelessPlugin instance;

	CommandManager cmdManager = Sponge.getCommandManager();

	@Inject
	private Logger logger;

	@Inject
	Game game;

	/*
	 * Metrics
	 */
	Metrics metrics;

	/*
	 * API URL
	 */
	private String apiURL = "";

	public final String permission = "namelessmc";
	public final String permissionAdmin = "namelessmc.admin";

	/*
	 * Configuration
	 */
	private String directory;
	private ConfigurationLoader<CommentedConfigurationNode> configManager;
	private CommentedConfigurationNode configNode;

	/** Singleton accessor; valid after {@link #onInitialize}. */
	public static NamelessPlugin getInstance(){
		return instance;
	}

	public Logger getLogger() {
		return this.logger;
	}

	public String getAPIUrl() {
		return this.apiURL;
	}

	public Game getGame(){
		return this.game;
	}

	public CommentedConfigurationNode getConfig(){
		return this.configNode;
	}

	/** Runs the given task asynchronously on the Sponge scheduler. */
	public void runTaskAsynchronously(Runnable task) {
		Sponge.getScheduler().createTaskBuilder().execute(task).async().submit(this);
	}

	public Server getServer(){
		return this.game.getServer();
	}

	@Listener
	public void onInitialize(GameInitializationEvent event) throws Exception {
		// FIX: instance was never assigned, so getInstance() always returned null.
		instance = this;
		directory = Sponge.getGame().getConfigManager().getPluginConfig(this).getDirectory().toString();
		initConfig();
		apiURL = getConfig().getNode("api-url").getString();
		registerListeners();
	}

	@Listener
	public void onStop(GameStoppingEvent event) throws Exception {
		getGame().getEventManager().unregisterPluginListeners(this);
	}

	/*
	 * Configuration Initialization
	 */
	/**
	 * Creates the config directory if needed, copies the bundled default
	 * config.conf on first run, and loads the HOCON configuration.
	 *
	 * @throws IOException if the default config cannot be copied or loaded
	 */
	public void initConfig() throws IOException {
		File dir = new File(directory);
		if (!dir.exists()){
			dir.mkdirs();
		}
		File config = new File(directory + File.separator + "config.conf");
		if (!config.exists()){
			// Files.copy creates the target file itself; the previous
			// createNewFile() call was redundant.
			Files.copy(this.getClass().getResource("config.conf").openStream(),
					config.getAbsoluteFile().toPath(), StandardCopyOption.REPLACE_EXISTING);
		}
		configManager = HoconConfigurationLoader.builder().setPath(config.toPath()).build();
		configNode = configManager.load();
	}

	/*
	 * Register Commands/Events
	 */
	/** Starts Metrics and registers the plugin commands. */
	public void registerListeners(){
		// Register Metrics; failure to start them must not abort command registration.
		try {
			this.metrics = new Metrics(this);
			this.metrics.start();
			getLogger().info(Text.of(TextColors.AQUA, "Metrics Started!").toPlain());
		} catch (IOException e) {
			// Log through the plugin logger instead of printStackTrace().
			getLogger().error("Failed to start Metrics!", e);
		}

		// Register commands
		CommandSpec getuserCMD = CommandSpec.builder()
				.description(Text.of("GetUser Command"))
				.arguments(GenericArguments.onlyOne(GenericArguments.string(Text.of("player"))))
				.executor(new GetUserCommand())
				.build();

		CommandSpec registerCMD = CommandSpec.builder()
				.description(Text.of("Register Command"))
				.arguments(GenericArguments.onlyOne(GenericArguments.string(Text.of("e-mail"))))
				.executor(new RegisterCommand())
				.build();

		cmdManager.register(this, getuserCMD, "getuser");
		cmdManager.register(this, registerCMD, "register");

		// The report command is opt-in via configuration.
		if (getConfig().getNode("enable-reports").getBoolean()){
			CommandSpec reportCMD = CommandSpec.builder()
					.description(Text.of("Report Command"))
					.arguments(GenericArguments.seq(GenericArguments.onlyOne(GenericArguments.string(Text.of("player"))),
							GenericArguments.remainingJoinedStrings(Text.of("reason"))))
					.executor(new ReportCommand())
					.build();
			cmdManager.register(this, reportCMD, "report");
		}
	}
}
package net.echinopsii.ariane.community.messaging.nats;

import io.nats.client.Connection;
import io.nats.client.Message;
import io.nats.client.SyncSubscription;

import net.echinopsii.ariane.community.messaging.api.AppMsgWorker;
import net.echinopsii.ariane.community.messaging.api.MomMsgTranslator;
import net.echinopsii.ariane.community.messaging.api.MomRequestExecutor;
import net.echinopsii.ariane.community.messaging.common.MomAkkaAbsRequestExecutor;
import net.echinopsii.ariane.community.messaging.common.MomLoggerFactory;

import org.slf4j.Logger;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * NATS implementation of {@link MomRequestExecutor}: fire-and-forget
 * publishing and synchronous RPC with timeout, retry and slow-destination
 * tracing support.
 */
public class RequestExecutor extends MomAkkaAbsRequestExecutor implements MomRequestExecutor<String, AppMsgWorker> {
    private static final Logger log = MomLoggerFactory.getLogger(RequestExecutor.class);

    // Nanoseconds per second as a long: the original int multiplication
    // (timeout * 1000000000) overflowed for timeouts >= 3 seconds.
    private static final long NS_PER_S = 1000000000L;

    // groupID -> (reply source -> subscription), kept open per message group.
    private HashMap<String, HashMap<String, SyncSubscription>> sessionsRPCSubs = new HashMap<>();
    // destination -> true when the last RPC on it was slow or timed out.
    private HashMap<String, Boolean> destinationTrace = new HashMap<>();

    public RequestExecutor(Client client) throws IOException {
        super(client);
    }

    /**
     * Publishes the request on the destination subject without waiting for an
     * answer. The current message group ID, when present, prefixes the
     * destination subject.
     *
     * @return the request map as given
     */
    @Override
    public Map<String, Object> fireAndForget(Map<String, Object> request, String destination) {
        String groupID = super.getMomClient().getCurrentMsgGroup();
        if (groupID != null) destination = groupID + "-" + destination;
        Message message = new MsgTranslator().encode(request);
        message.setSubject(destination);
        try {
            ((Connection) super.getMomClient().getConnection()).publish(message);
        } catch (IOException e) {
            // Best-effort by contract: log instead of printStackTrace().
            log.error("Failed to publish fire-and-forget message on " + destination, e);
        }
        return request;
    }

    /**
     * Sends the request and waits for the correlated answer, retrying up to
     * the client's RPC retry count on timeout.
     *
     * @param request     message fields to send
     * @param destination target subject (prefixed with the group ID if any)
     * @param replySource reply subject; when null, the NATS request/reply
     *                    helper is used instead of an explicit subscription
     * @param answerCB    optional callback applied to the decoded response
     * @return the (possibly callback-transformed) response, or null
     * @throws TimeoutException when no answer arrives after all retries
     */
    @Override
    public Map<String, Object> RPC(Map<String, Object> request, String destination, String replySource, AppMsgWorker answerCB) throws TimeoutException {
        Map<String, Object> response = null;
        String groupID = super.getMomClient().getCurrentMsgGroup();
        if (groupID != null) {
            destination = groupID + "-" + destination;
            if (replySource == null) replySource = destination + "-RET";
        }
        if (destinationTrace.get(destination) == null) destinationTrace.put(destination, false);
        if (destinationTrace.get(destination)) request.put(MomMsgTranslator.MSG_RETRY_COUNT, true);
        else request.remove(MomMsgTranslator.MSG_RETRY_COUNT);

        Message message = new MsgTranslator().encode(request);
        message.setSubject(destination);
        if (replySource != null) message.setReplyTo(replySource);

        try {
            Message msgResponse = null;
            long beginWaitingAnswer = 0;
            if (replySource == null) {
                // No explicit reply subject: use the built-in request/reply helper.
                msgResponse = ((Connection) super.getMomClient().getConnection()).request(
                        message.getSubject(), message.getData(),
                        super.getMomClient().getRPCTimout(), TimeUnit.SECONDS
                );
            } else {
                // UUID.randomUUID() is thread-safe; the previous
                // synchronized (UUID.class) block was unnecessary.
                String corrId = UUID.randomUUID().toString();
                request.put(MsgTranslator.MSG_CORRELATION_ID, corrId);
                SyncSubscription subs;
                if (groupID != null) {
                    // Reuse (or lazily create) the subscription cached for this group.
                    if (sessionsRPCSubs.get(groupID) != null) {
                        if (sessionsRPCSubs.get(groupID).get(replySource) != null)
                            subs = sessionsRPCSubs.get(groupID).get(replySource);
                        else {
                            subs = ((Connection) super.getMomClient().getConnection()).subscribeSync(replySource);
                            sessionsRPCSubs.get(groupID).put(replySource, subs);
                        }
                    } else {
                        HashMap<String, SyncSubscription> groupSubs = new HashMap<>();
                        subs = ((Connection) super.getMomClient().getConnection()).subscribeSync(replySource);
                        groupSubs.put(replySource, subs);
                        sessionsRPCSubs.put(groupID, groupSubs);
                    }
                } else subs = ((Connection) super.getMomClient().getConnection()).subscribeSync(replySource);

                ((Connection) super.getMomClient().getConnection()).publish(message);

                long rpcTimeout = super.getMomClient().getRPCTimout() * NS_PER_S;
                beginWaitingAnswer = System.nanoTime();
                while (msgResponse == null && rpcTimeout >= 0) {
                    try {
                        msgResponse = subs.nextMessage(rpcTimeout, TimeUnit.NANOSECONDS);
                        if (msgResponse != null) {
                            String responseCorrID = (String) new MsgTranslator().decode(msgResponse).get(MsgTranslator.MSG_CORRELATION_ID);
                            if (responseCorrID != null && !responseCorrID.equals(corrId)) {
                                // Late answer to an earlier request: drop it and keep waiting.
                                log.warn("Response discarded ( " + responseCorrID + " ) ...");
                                msgResponse = null;
                            }
                        }
                    } catch (InterruptedException | TimeoutException ex) {
                        log.debug("Thread interrupted while waiting for RPC answer...");
                    } finally {
                        // Recompute the remaining wait budget after each attempt.
                        if (super.getMomClient().getRPCTimout() > 0)
                            rpcTimeout = super.getMomClient().getRPCTimout() * NS_PER_S - (System.nanoTime() - beginWaitingAnswer);
                        else rpcTimeout = 0;
                    }
                }
                if (groupID == null) subs.close();
            }

            if (msgResponse != null) {
                long endWaitingAnswer = System.nanoTime();
                long rpcTime = endWaitingAnswer - beginWaitingAnswer;
                log.debug("RPC time : " + rpcTime);
                // Flag destinations whose answers take more than 3/5 of the timeout.
                if (rpcTime > super.getMomClient().getRPCTimout() * NS_PER_S * 3 / 5) {
                    destinationTrace.put(destination, true);
                    // FIX: the placeholder in "Slow RPC time ()" was empty.
                    log.warn("Slow RPC time (" + rpcTime + ") on request to queue " + destination);
                } else destinationTrace.put(destination, false);
                response = new MsgTranslator().decode(msgResponse);
            } else {
                log.warn("No response returned from request on " + destination + " queue after " +
                        super.getMomClient().getRPCTimout() + " sec...");
                if (request.containsKey(MomMsgTranslator.MSG_RETRY_COUNT)) {
                    // The field may hold the Boolean tracing marker set above rather
                    // than an Integer count; the original unguarded (int) cast threw
                    // ClassCastException in that case.
                    Object rawCount = request.get(MomMsgTranslator.MSG_RETRY_COUNT);
                    int retryCount = (rawCount instanceof Integer) ? (Integer) rawCount : 0;
                    if (retryCount < super.getMomClient().getRPCRetry()) {
                        // FIX: store the incremented count; the original post-increment
                        // (retryCount++) stored the old value, so the count never advanced.
                        // FIX: retry while the count is below the limit; the original
                        // condition (retryCount - getRPCRetry() > 0) was inverted.
                        request.put(MomMsgTranslator.MSG_RETRY_COUNT, retryCount + 1);
                        destinationTrace.put(destination, true);
                        log.warn("Retry (" + request.get(MomMsgTranslator.MSG_RETRY_COUNT) + ")");
                        return this.RPC(request, destination, replySource, answerCB);
                    } else throw new TimeoutException(
                            "No response returned from request on " + destination + " queue after " +
                                    super.getMomClient().getRPCTimout() + " sec..."
                    );
                } else {
                    request.put(MomMsgTranslator.MSG_RETRY_COUNT, 1);
                    destinationTrace.put(destination, true);
                    log.warn("Retry (" + request.get(MomMsgTranslator.MSG_RETRY_COUNT) + ")");
                    return this.RPC(request, destination, replySource, answerCB);
                }
            }
        } catch (TimeoutException tex) {
            // FIX: the original catch (Exception) swallowed the TimeoutException
            // declared by this method; rethrow it so callers actually see it.
            throw tex;
        } catch (Exception e) {
            log.error("Error while executing RPC on " + destination, e);
        }
        if (answerCB != null) response = answerCB.apply(response);
        return response;
    }

    /** Closes and forgets all cached reply subscriptions of the given group. */
    public void cleanGroupReqResources(String groupID) {
        if (this.sessionsRPCSubs.get(groupID) != null) {
            for (String replySource : this.sessionsRPCSubs.get(groupID).keySet())
                this.sessionsRPCSubs.get(groupID).get(replySource).close();
            this.sessionsRPCSubs.get(groupID).clear();
            this.sessionsRPCSubs.remove(groupID);
        }
    }

    public void stop() {
    }
}
package org.nuxeo.ecm.restapi.test; import static org.junit.Assert.*; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.ws.rs.core.Response; import org.apache.commons.lang.StringUtils; import org.codehaus.jackson.JsonNode; import org.junit.Test; import org.junit.runner.RunWith; import org.nuxeo.ecm.automation.io.services.enricher .ContentEnricherServiceImpl; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.core.api.IdRef; import org.nuxeo.ecm.core.api.VersioningOption; import org.nuxeo.ecm.core.test.annotations.Granularity; import org.nuxeo.ecm.core.test.annotations.RepositoryConfig; import org.nuxeo.ecm.restapi.jaxrs.io.RestConstants; import org.nuxeo.ecm.restapi.jaxrs.io.documents.ACPWriter; import org.nuxeo.runtime.test.runner.Features; import org.nuxeo.runtime.test.runner.FeaturesRunner; import org.nuxeo.runtime.test.runner.Jetty; import com.sun.jersey.api.client.ClientResponse; /** * Test the CRUD rest API * * @since 5.7.2 */ @RunWith(FeaturesRunner.class) @Features({ RestServerFeature.class }) @Jetty(port = 18090) @RepositoryConfig(cleanup = Granularity.METHOD, init = RestServerInit.class) public class DocumentBrowsingTest extends BaseTest { @Test public void iCanBrowseTheRepoByItsPath() throws Exception { // Given an existing document DocumentModel note = RestServerInit.getNote(0, session); // When i do a GET Request ClientResponse response = getResponse(RequestType.GET, "path" + note.getPathAsString()); // Then i get a document assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()); assertEntityEqualsDoc(response.getEntityInputStream(), note); } @Test public void iCanBrowseTheRepoByItsId() throws Exception { // Given a document DocumentModel note = RestServerInit.getNote(0, session); // When i do a GET Request ClientResponse response = getResponse(RequestType.GET, "id/" + note.getId()); // The i get the document as Json 
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()); assertEntityEqualsDoc(response.getEntityInputStream(), note); } @Test public void iCanGetTheChildrenOfADoc() throws Exception { // Given a folder with one document DocumentModel folder = RestServerInit.getFolder(0, session); DocumentModel child = session.createDocumentModel( folder.getPathAsString(), "note", "Note"); child = session.createDocument(child); session.save(); // When i call a GET on the children for that doc ClientResponse response = getResponse(RequestType.GET, "id/" + folder.getId() + "/@children"); // Then i get the only document of the folder assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()); JsonNode node = mapper.readTree(response.getEntityInputStream()); Iterator<JsonNode> elements = node.get("entries").getElements(); node = elements.next(); assertNodeEqualsDoc(node, child); } @Test public void iCanUpdateADocument() throws Exception { // Given a document DocumentModel note = RestServerInit.getNote(0, session); ClientResponse response = getResponse(RequestType.GET, "id/" + note.getId()); assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()); // When i do a PUT request on the document with modified data JSONDocumentNode jsonDoc = new JSONDocumentNode( response.getEntityInputStream()); jsonDoc.setPropertyValue("dc:title", "New title"); response = getResponse(RequestType.PUT, "id/" + note.getId(), jsonDoc.asJson()); // Then the document is updated fetchInvalidations(); note = RestServerInit.getNote(0, session); assertEquals("New title", note.getTitle()); } @Test public void iCanUpdateDocumentVersion() throws Exception { // Given a document DocumentModel note = RestServerInit.getNote(0, session); ClientResponse response = getResponse(RequestType.GET, "id/" + note.getId()); assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()); // Check the current version of the live document assertEquals("0.0", note.getVersionLabel()); // 
// NOTE(review): this span begins mid-method — the method signature and its opening
// lines are above this chunk. The tail of that method is reproduced unchanged below.
// When i do a PUT request on the document with modified version in the header
JSONDocumentNode jsonDoc = new JSONDocumentNode(response.getEntityInputStream());
Map<String, String> headers = new HashMap<>();
headers.put(RestConstants.X_VERSIONING_OPTION, VersioningOption.MAJOR.toString());
response = getResponse(RequestType.PUT, "id/" + note.getId(), jsonDoc.asJson(), headers);
// Check if the version of the document has been returned
JsonNode node = mapper.readTree(response.getEntityInputStream());
assertEquals("1.0", node.get("versionLabel").getValueAsText());
// Check if the original document is still not versioned.
note = RestServerInit.getNote(0, session);
assertEquals("0.0", note.getVersionLabel());
}

/**
 * A PUT without an "uid" in the JSON payload must still update the document
 * addressed by the URL path.
 */
@Test
public void itCanUpdateADocumentWithoutSpecifyingIdInJSONPayload() throws Exception {
    // Given a document
    DocumentModel note = RestServerInit.getNote(0, session);
    ClientResponse response = getResponse(RequestType.GET, "path" + note.getPathAsString());
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());

    // When i do a PUT request on the document with modified data
    response = getResponse(RequestType.PUT, "id/" + note.getId(),
            "{\"entity-type\":\"document\",\"properties\":{\"dc:title\":\"Other New title\"}}");

    // Then the document is updated
    fetchInvalidations();
    note = RestServerInit.getNote(0, session);
    assertEquals("Other New title", note.getTitle());
}

/**
 * POSTing a document JSON payload to a folder path creates the child document
 * and returns the created entity (201).
 */
@Test
public void iCanCreateADocument() throws Exception {
    // Given a folder and a Rest Creation request
    DocumentModel folder = RestServerInit.getFolder(0, session);
    String data = "{\"entity-type\": \"document\",\"type\": \"File\",\"name\":\"newName\",\"properties\": {\"dc:title\":\"My title\"}}";

    ClientResponse response = getResponse(RequestType.POST, "path" + folder.getPathAsString(), data);
    assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus());

    // Then the create document is returned
    JsonNode node = mapper.readTree(response.getEntityInputStream());
    assertEquals("My title", node.get("title").getValueAsText());
    String id = node.get("uid").getValueAsText();
    assertTrue(StringUtils.isNotBlank(id));

    // Then a document is created in the database
    fetchInvalidations();
    DocumentModel doc = session.getDocument(new IdRef(id));
    assertEquals(folder.getPathAsString() + "/newName", doc.getPathAsString());
    assertEquals("My title", doc.getTitle());
    assertEquals("File", doc.getType());
}

/** DELETE on a document path removes it from the repository (204). */
@Test
public void iCanDeleteADocument() throws Exception {
    // Given a document
    DocumentModel folder = RestServerInit.getFolder(1, session);
    DocumentModel doc = RestServerInit.getNote(0, session);

    // When I do a DELETE request
    ClientResponse response = getResponse(RequestType.DELETE, "path" + doc.getPathAsString());
    assertEquals(Response.Status.NO_CONTENT.getStatusCode(), response.getStatus());

    fetchInvalidations();
    // Then the doc is deleted
    assertTrue(!session.exists(doc.getRef()));
}

/**
 * The "repo/{name}" URL segment selects the repository; an unknown repository
 * name yields 404.
 */
@Test
public void iCanChooseAnotherRepositoryName() throws Exception {
    // Given an existing document
    DocumentModel note = RestServerInit.getNote(0, session);

    // When i do a GET Request on the note repository
    ClientResponse response = getResponse(RequestType.GET,
            "repo/" + note.getRepositoryName() + "/path" + note.getPathAsString());

    // Then i get a document
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    assertEntityEqualsDoc(response.getEntityInputStream(), note);

    // When i do a GET Request on a non existent repository
    response = getResponse(RequestType.GET, "repo/nonexistentrepo/path" + note.getPathAsString());

    // Then i receive a 404
    assertEquals(Response.Status.NOT_FOUND.getStatusCode(), response.getStatus());
}

/** The "@acl" web adapter returns the document's ACL entity. */
@Test
public void iCanGetTheACLsOnADocumentThroughAdapter() throws Exception {
    // Given an existing document
    DocumentModel note = RestServerInit.getNote(0, session);

    // When i do a GET Request on the note repository
    ClientResponse response = getResponse(RequestType.GET,
            "repo/" + note.getRepositoryName() + "/path" + note.getPathAsString() + "/@acl");

    // Then i get a the ACL
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    JsonNode node = mapper.readTree(response.getEntityInputStream());
    assertEquals(ACPWriter.ENTITY_TYPE, node.get("entity-type").getValueAsText());
}

/**
 * Requesting the "acls" content-enricher category via header embeds the ACLs
 * in the contributor context parameters of the document entity.
 */
@Test
public void iCanGetTheACLsOnADocumentThroughContributor() throws Exception {
    // Given an existing document
    DocumentModel note = RestServerInit.getNote(0, session);
    Map<String, String> headers = new HashMap<>();
    headers.put(ContentEnricherServiceImpl.NXCONTENT_CATEGORY_HEADER, "acls");

    // When i do a GET Request on the note repository
    ClientResponse response = getResponse(RequestType.GET,
            "repo/" + note.getRepositoryName() + "/path" + note.getPathAsString(), headers);

    // Then i get a the ACL
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    JsonNode node = mapper.readTree(response.getEntityInputStream());
    assertEquals("inherited",
            node.get(RestConstants.CONTRIBUTOR_CTX_PARAMETERS).get("acls").get(0).get("name").getTextValue());
}

/**
 * The "thumbnail" enricher contributes an entry even when the document has no
 * image, in which case the url value is null.
 */
@Test
public void iCanGetTheThumbnailOfADocumentThroughContributor() throws Exception {
    //TODO NXP-14793: Improve testing by adding thumbnail conversion
    // Attach a blob
    //Blob blob = new InputStreamBlob(DocumentBrowsingTest.class.getResource(
    //"/test-data/png.png").openStream(), "image/png",
    //null, "logo.png", null);
    //DocumentModel file = RestServerInit.getFile(0, session);
    //file.setPropertyValue("file:content", (Serializable) blob);
    //file = session.saveDocument(file);
    //session.save();
    //ClientResponse response = getResponse(
    //RequestType.GET,
    //"repo/" + file.getRepositoryName() + "/path"
    //+ file.getPathAsString(), headers);
    // Then i get an entry for thumbnail
    //assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    //JsonNode node = mapper.readTree(response.getEntityInputStream());
    //assertEquals("specificUrl", node.get(RestConstants
    //.CONTRIBUTOR_CTX_PARAMETERS).get("thumbnail").get
    //("thumbnailUrl").getTextValue());
    Map<String, String> headers = new HashMap<>();
    headers.put(ContentEnricherServiceImpl.NXCONTENT_CATEGORY_HEADER, "thumbnail");

    // Given an existing document
    DocumentModel note = RestServerInit.getNote(0, session);

    // When i do a GET Request on the note without any image
    ClientResponse response = getResponse(RequestType.GET,
            "repo/" + note.getRepositoryName() + "/path" + note.getPathAsString(), headers);

    // Then i get no result for valid thumbnail url as expected but still
    // thumbnail entry from the contributor
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    JsonNode node = mapper.readTree(response.getEntityInputStream());
    assertEquals(null,
            node.get(RestConstants.CONTRIBUTOR_CTX_PARAMETERS).get("thumbnail").get("url").getTextValue());
}

/** Paths containing spaces must resolve both URL-encoded ("%20") and raw. */
@Test
public void itCanBrowseDocumentWithSpacesInPath() throws Exception {
    DocumentModel folder = RestServerInit.getFolder(0, session);
    DocumentModel note = session.createDocumentModel(folder.getPathAsString(), "doc with space", "Note");
    note = session.createDocument(note);
    session.save();

    // When i do a GET Request on the note repository (encoded space)
    ClientResponse response = getResponse(RequestType.GET,
            "repo/" + note.getRepositoryName() + "/path" + note.getPathAsString().replace(" ", "%20"));

    // Then the document resolves
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());

    // When i do a GET Request on the note repository (raw space)
    response = getResponse(RequestType.GET,
            "repo/" + note.getRepositoryName() + "/path" + note.getPathAsString());

    // Then the document resolves too
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
}

/**
 * Multi-valued (array) properties can be replaced wholesale via PUT.
 * NOTE(review): the final size check expects 5 entries for 4 submitted values —
 * presumably the note already has one contributor from fixture setup; verify
 * against RestServerInit.
 */
@Test
public void itCanModifyArrayTypes() throws Exception {
    // Given a document
    DocumentModel note = RestServerInit.getNote(0, session);
    ClientResponse response = getResponse(RequestType.GET, "id/" + note.getId());
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());

    // When i do a PUT request on the document with modified data
    JSONDocumentNode jsonDoc = new JSONDocumentNode(response.getEntityInputStream());
    jsonDoc.setPropertyValue("dc:title", "New title");
    jsonDoc.setPropertyArray("dc:contributors", "me", "you", "them", "everybody");
    response = getResponse(RequestType.PUT, "id/" + note.getId(), jsonDoc.asJson());

    // Then the document is updated
    fetchInvalidations();
    note = RestServerInit.getNote(0, session);
    assertEquals("New title", note.getTitle());

    List<String> contributors = Arrays.asList((String[]) note.getPropertyValue("dc:contributors"));
    assertTrue(contributors.contains("me"));
    assertTrue(contributors.contains("you"));
    assertTrue(contributors.contains("them"));
    assertTrue(contributors.contains("everybody"));
    assertEquals(5, contributors.size());
}
}
package bisq.network.p2p.storage.persistence;

import bisq.network.p2p.storage.P2PDataStorage;
import bisq.network.p2p.storage.payload.PersistableNetworkPayload;

import bisq.common.proto.persistable.PersistableEnvelope;

import javax.inject.Inject;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

/**
 * Used for PersistableNetworkPayload data which gets appended to a map storage.
 * Aggregates several {@link MapStoreService} backends and exposes them as one
 * logical append-only map.
 */
@Slf4j
public class AppendOnlyDataStoreService {
    @Getter
    private final List<MapStoreService<? extends PersistableEnvelope, PersistableNetworkPayload>> services = new ArrayList<>();


    ///////////////////////////////////////////////////////////////////////////////////////////
    // Constructor
    ///////////////////////////////////////////////////////////////////////////////////////////

    @Inject
    public AppendOnlyDataStoreService() {
    }

    /**
     * Registers a backing store service. Registration order determines which
     * entry wins when {@link #getMap()} encounters the same key in two services.
     */
    public void addService(MapStoreService<? extends PersistableEnvelope, PersistableNetworkPayload> service) {
        services.add(service);
    }

    /** Loads all registered services from their bundled resource files. */
    public void readFromResources(String postFix) {
        services.forEach(service -> service.readFromResources(postFix));
    }

    /**
     * Returns a merged snapshot of all services' maps.
     * <p>
     * BUGFIX: the previous {@code Collectors.toMap(getKey, getValue)} call threw an
     * {@code IllegalStateException} if two services ever contained the same hash.
     * We now keep the first-seen entry instead of crashing.
     */
    public Map<P2PDataStorage.ByteArray, PersistableNetworkPayload> getMap() {
        return services.stream()
                .flatMap(service -> service.getMap().entrySet().stream())
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (first, second) -> first));
    }

    /**
     * Adds the payload to every service that can handle it; existing entries for
     * the same hash are left untouched (append-only semantics via putIfAbsent).
     */
    public void put(P2PDataStorage.ByteArray hashAsByteArray, PersistableNetworkPayload payload) {
        services.stream()
                .filter(service -> service.canHandle(payload))
                .forEach(service -> service.putIfAbsent(hashAsByteArray, payload));
    }
}
package com.intellij.openapi.projectRoots.impl;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.JdkUtil;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.impl.jdkDownloader.JdkInstaller;
import com.intellij.openapi.projectRoots.impl.jdkDownloader.JdkInstallerStore;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.SystemInfoRt;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.util.EnvironmentUtil;
import com.intellij.util.SystemProperties;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.nio.file.Files.exists;
import static java.nio.file.Files.isDirectory;
import static java.util.Collections.emptySet;

/**
 * Detects installed JDKs by probing a set of pluggable "finder" strategies:
 * known default install locations, directories on PATH, explicitly supplied
 * paths, SDKMAN installations and (optionally) the JDK embedded in the IDE.
 */
public class JavaHomeFinderBasic {
  private final Logger log = Logger.getInstance(getClass());
  private final List<Supplier<Set<String>>> myFinders = new ArrayList<>();

  JavaHomeFinderBasic(boolean forceEmbeddedJava, String... paths) {
    myFinders.add(this::checkDefaultLocations);
    myFinders.add(this::findInPATH);
    myFinders.add(() -> findInSpecifiedPaths(paths));
    myFinders.add(this::findJavaInstalledBySdkMan);

    if (forceEmbeddedJava || Registry.is("java.detector.include.embedded", false)) {
      myFinders.add(() -> scanAll(getJavaHome(), false));
    }
  }

  private @NotNull Set<String> findInSpecifiedPaths(String[] paths) {
    return scanAll(Stream.of(paths).map(Paths::get).collect(Collectors.toList()), true);
  }

  protected void registerFinder(@NotNull Supplier<Set<String>> finder) {
    myFinders.add(finder);
  }

  /**
   * Runs every registered finder and returns the union of discovered JDK home
   * paths (sorted). A failure of one finder is logged and does not abort the rest.
   */
  @NotNull
  public final Set<String> findExistingJdks() {
    Set<String> result = new TreeSet<>();
    for (Supplier<Set<String>> action : myFinders) {
      try {
        result.addAll(action.get());
      }
      catch (Exception e) {
        log.warn("Failed to find Java Home. " + e.getMessage(), e);
      }
    }
    return result;
  }

  /**
   * Looks for "<something>/bin" directories on PATH and scans their parents
   * (and derived install roots) for JDK homes.
   */
  private @NotNull Set<String> findInPATH() {
    try {
      String pathVarString = EnvironmentUtil.getValue("PATH");
      if (pathVarString == null || pathVarString.isEmpty()) {
        return emptySet();
      }

      Set<Path> dirsToCheck = new HashSet<>();
      for (String p : pathVarString.split(File.pathSeparator)) {
        Path dir = Paths.get(p);
        // BUGFIX: getFileName() is null for filesystem roots (e.g. "/" or "C:\") —
        // previously this would throw NPE and abort the whole PATH scan.
        Path fileName = dir.getFileName();
        if (fileName == null || !StringUtilRt.equal(fileName.toString(), "bin", SystemInfoRt.isFileSystemCaseSensitive)) {
          continue;
        }

        Path parentFile = dir.getParent();
        if (parentFile == null) {
          continue;
        }

        dirsToCheck.addAll(listPossibleJdkInstallRootsFromHomes(parentFile));
      }

      return scanAll(dirsToCheck, false);
    }
    catch (Exception e) {
      log.warn("Failed to scan PATH for JDKs. " + e.getMessage(), e);
      return emptySet();
    }
  }

  /**
   * Scans the default JDK download directory, previously recorded install homes
   * and the homes of all JDKs already registered in the project JDK table.
   */
  @NotNull
  private Set<String> checkDefaultLocations() {
    if (ApplicationManager.getApplication() == null) {
      return emptySet();
    }

    Set<Path> paths = new HashSet<>();
    paths.add(JdkInstaller.getInstance().defaultInstallDir());
    paths.addAll(JdkInstallerStore.getInstance().listJdkInstallHomes());

    for (Sdk jdk : ProjectJdkTable.getInstance().getAllJdks()) {
      if (!(jdk.getSdkType() instanceof JavaSdkType) || jdk.getSdkType() instanceof DependentSdkType) {
        continue;
      }

      String homePath = jdk.getHomePath();
      if (homePath == null) {
        continue;
      }

      paths.addAll(listPossibleJdkInstallRootsFromHomes(Paths.get(homePath)));
    }

    return scanAll(paths, true);
  }

  protected @NotNull Set<String> scanAll(@Nullable Path file, boolean includeNestDirs) {
    if (file == null) {
      return emptySet();
    }
    return scanAll(Collections.singleton(file), includeNestDirs);
  }

  protected @NotNull Set<String> scanAll(@NotNull Collection<Path> files, boolean includeNestDirs) {
    Set<String> result = new HashSet<>();
    for (Path root : new HashSet<>(files)) {
      scanFolder(root.toFile(), includeNestDirs, result);
    }
    return result;
  }

  /**
   * If {@code folder} is itself a JDK home, records it; otherwise (when
   * {@code includeNestDirs}) checks each direct child, adjusted for
   * platform-specific layouts via {@link #listPossibleJdkHomesFromInstallRoot}.
   */
  private void scanFolder(@NotNull File folder, boolean includeNestDirs, @NotNull Collection<? super String> result) {
    if (JdkUtil.checkForJdk(folder)) {
      result.add(folder.getAbsolutePath());
      return;
    }

    if (!includeNestDirs) return;
    File[] files = folder.listFiles();
    if (files == null) return;

    for (File candidate : files) {
      for (File adjusted : listPossibleJdkHomesFromInstallRoot(candidate)) {
        scanFolder(adjusted, false, result);
      }
    }
  }

  @NotNull
  protected List<File> listPossibleJdkHomesFromInstallRoot(@NotNull File file) {
    return Collections.singletonList(file);
  }

  protected @NotNull List<Path> listPossibleJdkInstallRootsFromHomes(@NotNull Path file) {
    return Collections.singletonList(file);
  }

  /** Returns the parent of the java.home system property (which points to the JRE home), if it exists. */
  protected static @Nullable Path getJavaHome() {
    String property = SystemProperties.getJavaHome();
    if (property == null || property.isEmpty()) {
      return null;
    }
    // actually java.home points to the JRE home
    Path javaHome = Path.of(property).getParent();
    return javaHome == null || !isDirectory(javaHome) ? null : javaHome;
  }

  private @NotNull Set<@NotNull String> findJavaInstalledBySdkMan() {
    Path candidatesDir = findSdkManCandidatesDir();
    if (candidatesDir == null) return emptySet();
    Path javasDir = candidatesDir.resolve("java");
    if (!isDirectory(javasDir)) return emptySet();
    return listJavaHomeDirsInstalledBySdkMan(javasDir);
  }

  /** Locates SDKMAN's "candidates" directory via its env variables or the default location. */
  @Nullable
  private static Path findSdkManCandidatesDir() {
    // first, try the special environment variable
    String candidatesPath = EnvironmentUtil.getValue("SDKMAN_CANDIDATES_DIR");
    if (candidatesPath != null) {
      Path candidatesDir = Path.of(candidatesPath);
      if (isDirectory(candidatesDir)) return candidatesDir;
    }

    // then, try to use its 'primary' variable
    String primaryPath = EnvironmentUtil.getValue("SDKMAN_DIR");
    if (primaryPath != null) {
      Path primaryDir = Path.of(primaryPath);
      if (isDirectory(primaryDir)) {
        Path candidatesDir = primaryDir.resolve("candidates");
        if (isDirectory(candidatesDir)) return candidatesDir;
      }
    }

    // finally, try the usual location in Unix or MacOS
    if (!SystemInfo.isWindows) {
      String homePath = System.getProperty("user.home");
      if (homePath != null) {
        Path homeDir = Path.of(homePath);
        Path primaryDir = homeDir.resolve(".sdkman");
        Path candidatesDir = primaryDir.resolve("candidates");
        if (isDirectory(candidatesDir)) return candidatesDir;
      }
    }

    // no chances
    return null;
  }

  /**
   * Lists JDK homes under SDKMAN's java candidates dir.
   * On macOS, Zulu's symlinked layout is resolved to its real home
   * (see IDEA-253051) so Gradle can use the path.
   */
  private @NotNull Set<@NotNull String> listJavaHomeDirsInstalledBySdkMan(@NotNull Path javasDir) {
    var mac = SystemInfo.isMac;
    var result = new HashSet<@NotNull String>();

    // BUGFIX: Files.list returns a stream backed by an open directory handle —
    // it must be closed, so wrap it in try-with-resources (was leaked before).
    try (var dirStream = Files.list(javasDir)) {
      var innerDirectories = dirStream.filter(d -> isDirectory(d)).collect(Collectors.toList());
      for (Path innerDir : innerDirectories) {
        var home = innerDir;
        var releaseFile = home.resolve("release");
        if (!exists(releaseFile)) continue;

        // Zulu JDK on MacOS has a rogue layout, with which Gradle failed to operate (see the bugreport IDEA-253051),
        // and in order to get Gradle working with Zulu JDK we should use it's second home (when symbolic links are resolved),
        boolean zuluOnMac = mac && Files.isSymbolicLink(releaseFile) && home.getFileName().toString().contains("zulu");
        if (zuluOnMac) {
          try {
            var realReleaseFile = releaseFile.toRealPath();
            if (!exists(realReleaseFile)) {
              log.warn("Failed to resolve the target file (it doesn't exist) for: " + releaseFile.toString());
              continue;
            }
            var realHome = realReleaseFile.getParent();
            if (realHome == null) {
              log.warn("Failed to resolve the target file (it has no parent dir) for: " + releaseFile.toString());
              continue;
            }
            home = realHome;
          }
          catch (IOException ioe) {
            log.warn("Failed to resolve the target file (exception) for: " + releaseFile.toString() + ": " + ioe.getMessage());
          }
        }

        result.add(home.toString());
      }
    }
    catch (IOException ioe) {
      log.warn("Unexpected exception while listing Java home directories installed by Sdkman: " + ioe.getMessage(), ioe);
      return emptySet();
    }

    return result;
  }
}
package org.python.pydev.editor.preferences; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.preference.PreferenceConverter; import org.eclipse.jface.preference.PreferenceStore; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.StyleRange; import org.eclipse.swt.custom.StyledText; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.List; import org.eclipse.ui.preferences.IWorkbenchPreferenceContainer; import org.python.pydev.core.log.Log; import org.python.pydev.editor.StyledTextForShowingCodeFactory; import org.python.pydev.editor.actions.PyFormatStd; import org.python.pydev.editor.actions.PyFormatStd.FormatStd; import org.python.pydev.plugin.PydevPlugin; import org.python.pydev.plugin.preferences.AbstractPydevPrefs; import org.python.pydev.plugin.preferences.ColorEditor; import org.python.pydev.plugin.preferences.PydevPrefs; import org.python.pydev.shared_core.structure.Tuple; import org.python.pydev.shared_ui.field_editors.LinkFieldEditor; import org.python.pydev.shared_ui.utils.RunInUiThread; import org.python.pydev.shared_ui.word_boundaries.SubWordPreferences; /** * The preference page for setting the editor options. 
* <p> * This class is internal and not intended to be used by clients.</p> */ public class PydevEditorPrefs extends AbstractPydevPrefs { private static final String WORD_NAVIGATION_NATIVE_CAPTION = "Native"; private static final String WORD_NAVIGATION_SUBWORD_CAPTION = "Subword"; /** * Shows sample code with the new preferences. */ private StyledText labelExample; /** * A local store that has the preferences set given the user configuration of colors. */ private final IPreferenceStore localStore; /** * Helper to create the styled text and show the code later. */ private StyledTextForShowingCodeFactory formatAndStyleRangeHelper; private IPropertyChangeListener updateLabelExampleOnPrefsChanges; private Combo comboNavigation; public PydevEditorPrefs() { setDescription("PyDev editor appearance settings"); setPreferenceStore(PydevPlugin.getDefault().getPreferenceStore()); fOverlayStore = createOverlayStore(); localStore = new PreferenceStore(); } @Override protected void initialize() { super.initialize(); String caption = WORD_NAVIGATION_SUBWORD_CAPTION; if (fOverlayStore.getString(SubWordPreferences.WORD_NAVIGATION_STYLE) .equals(SubWordPreferences.WORD_NAVIGATION_STYLE_NATIVE)) { caption = WORD_NAVIGATION_NATIVE_CAPTION; } comboNavigation.setText(caption); } @Override protected Control createAppearancePage(Composite parent) { GridData gridData; Composite appearanceComposite = new Composite(parent, SWT.NONE); Composite wordNavigationComposite = new Composite(appearanceComposite, SWT.NONE); gridData = new GridData(GridData.FILL_HORIZONTAL); gridData.grabExcessHorizontalSpace = true; wordNavigationComposite.setLayoutData(gridData); GridLayout wordNavigationLayout = new GridLayout(); wordNavigationLayout.marginWidth = 0; wordNavigationLayout.marginRight = 5; wordNavigationLayout.numColumns = 2; wordNavigationComposite.setLayout(wordNavigationLayout); Label label = new Label(wordNavigationComposite, SWT.NONE); label.setText("Word navigation"); gridData = new GridData(); 
gridData.grabExcessHorizontalSpace = false; label.setLayoutData(gridData); comboNavigation = new Combo(wordNavigationComposite, SWT.CHECK); comboNavigation.add(WORD_NAVIGATION_SUBWORD_CAPTION); comboNavigation.add(WORD_NAVIGATION_NATIVE_CAPTION); comboNavigation.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { String text = comboNavigation.getText(); String style = SubWordPreferences.WORD_NAVIGATION_STYLE_SUBWORD; if (WORD_NAVIGATION_NATIVE_CAPTION.equals(text)) { style = SubWordPreferences.WORD_NAVIGATION_STYLE_SUBWORD; } fOverlayStore.setValue(SubWordPreferences.WORD_NAVIGATION_STYLE, style); } }); gridData = new GridData(GridData.FILL_HORIZONTAL); gridData.grabExcessHorizontalSpace = true; comboNavigation.setLayoutData(gridData); createColorOptions(appearanceComposite); Composite exampleComposite = new Composite(appearanceComposite, SWT.NONE); gridData = new GridData(GridData.FILL_HORIZONTAL); gridData.grabExcessHorizontalSpace = true; exampleComposite.setLayoutData(gridData); GridLayout exampleGridLayout = new GridLayout(); exampleGridLayout.marginWidth = 0; exampleGridLayout.marginRight = 5; exampleComposite.setLayout(exampleGridLayout); formatAndStyleRangeHelper = new StyledTextForShowingCodeFactory(); labelExample = formatAndStyleRangeHelper.createStyledTextForCodePresentation(exampleComposite); gridData = new GridData(GridData.FILL_HORIZONTAL); gridData.grabExcessHorizontalSpace = true; labelExample.setLayoutData(gridData); updateLabelExample(PyFormatStd.getFormat(null), PydevPrefs.getChainedPrefStore()); LinkFieldEditor tabsFieldEditor = new LinkFieldEditor("UNUSED", "Other settings:\n\n<a>Tabs</a>: tab preferences for PyDev ...\n(note: 'Insert spaces for tabs' in the general settings is ignored).", appearanceComposite, new SelectionListener() { @Override public void widgetSelected(SelectionEvent e) { String id = "org.python.pydev.editor.preferences.PyTabPreferencesPage"; IWorkbenchPreferenceContainer 
workbenchPreferenceContainer = ((IWorkbenchPreferenceContainer) getContainer()); workbenchPreferenceContainer.openPage(id, null); } @Override public void widgetDefaultSelected(SelectionEvent e) { } }); tabsFieldEditor.getLinkControl(appearanceComposite); LinkFieldEditor colorsAndFontsLinkFieldEditor = new LinkFieldEditor("UNUSED", "<a>Text Editors</a>: print margin, line numbers ...", appearanceComposite, new SelectionListener() { @Override public void widgetSelected(SelectionEvent e) { String id = "org.eclipse.ui.preferencePages.GeneralTextEditor"; IWorkbenchPreferenceContainer workbenchPreferenceContainer = ((IWorkbenchPreferenceContainer) getContainer()); workbenchPreferenceContainer.openPage(id, null); } @Override public void widgetDefaultSelected(SelectionEvent e) { } }); colorsAndFontsLinkFieldEditor.getLinkControl(appearanceComposite); colorsAndFontsLinkFieldEditor = new LinkFieldEditor("UNUSED", "<a>Colors and Fonts</a>: text font, content assist color ...", appearanceComposite, new SelectionListener() { @Override public void widgetSelected(SelectionEvent e) { String id = "org.eclipse.ui.preferencePages.ColorsAndFonts"; IWorkbenchPreferenceContainer workbenchPreferenceContainer = ((IWorkbenchPreferenceContainer) getContainer()); workbenchPreferenceContainer.openPage(id, null); } @Override public void widgetDefaultSelected(SelectionEvent e) { } }); colorsAndFontsLinkFieldEditor.getLinkControl(appearanceComposite); colorsAndFontsLinkFieldEditor = new LinkFieldEditor("UNUSED", "<a>Annotations</a>: occurrences, markers ...", appearanceComposite, new SelectionListener() { @Override public void widgetSelected(SelectionEvent e) { String id = "org.eclipse.ui.editors.preferencePages.Annotations"; IWorkbenchPreferenceContainer workbenchPreferenceContainer = ((IWorkbenchPreferenceContainer) getContainer()); workbenchPreferenceContainer.openPage(id, null); } @Override public void widgetDefaultSelected(SelectionEvent e) { } }); 
colorsAndFontsLinkFieldEditor.getLinkControl(appearanceComposite); return appearanceComposite; } private void createColorOptions(Composite appearanceComposite) { GridLayout layout; Label l = new Label(appearanceComposite, SWT.LEFT); GridData gd = new GridData(GridData.HORIZONTAL_ALIGN_FILL); gd.horizontalSpan = 2; gd.heightHint = convertHeightInCharsToPixels(1) / 2; l.setLayoutData(gd); l = new Label(appearanceComposite, SWT.LEFT); l.setText("Appearance color options:"); gd = new GridData(GridData.HORIZONTAL_ALIGN_FILL); gd.horizontalSpan = 2; l.setLayoutData(gd); Composite editorComposite = new Composite(appearanceComposite, SWT.NONE); layout = new GridLayout(); layout.numColumns = 2; layout.marginHeight = 2; layout.marginWidth = 0; editorComposite.setLayout(layout); gd = new GridData(GridData.FILL_HORIZONTAL); gd.grabExcessHorizontalSpace = true; editorComposite.setLayoutData(gd); fAppearanceColorList = new List(editorComposite, SWT.SINGLE | SWT.V_SCROLL | SWT.BORDER); gd = new GridData(GridData.FILL_HORIZONTAL); gd.heightHint = convertHeightInCharsToPixels(8); gd.grabExcessHorizontalSpace = true; fAppearanceColorList.setLayoutData(gd); Composite stylesComposite = new Composite(editorComposite, SWT.NONE); layout = new GridLayout(); layout.marginHeight = 0; layout.marginWidth = 0; layout.numColumns = 2; stylesComposite.setLayout(layout); l = new Label(stylesComposite, SWT.LEFT); l.setText("Color:"); gd = new GridData(); gd.horizontalAlignment = GridData.BEGINNING; l.setLayoutData(gd); fAppearanceColorEditor = new ColorEditor(stylesComposite); Button foregroundColorButton = fAppearanceColorEditor.getButton(); gd = new GridData(GridData.FILL_HORIZONTAL); gd.horizontalAlignment = GridData.BEGINNING; foregroundColorButton.setLayoutData(gd); SelectionListener colorDefaultSelectionListener = new SelectionListener() { @Override public void widgetSelected(SelectionEvent e) { boolean systemDefault = fAppearanceColorDefault.getSelection(); 
fAppearanceColorEditor.getButton().setEnabled(!systemDefault); int i = fAppearanceColorList.getSelectionIndex(); String key = fAppearanceColorListModel[i][2]; if (key != null) { fOverlayStore.setValue(key, systemDefault); } } @Override public void widgetDefaultSelected(SelectionEvent e) { } }; fAppearanceColorDefault = new Button(stylesComposite, SWT.CHECK); fAppearanceColorDefault.setText("System default"); gd = new GridData(GridData.FILL_HORIZONTAL); gd.horizontalAlignment = GridData.BEGINNING; gd.horizontalSpan = 2; fAppearanceColorDefault.setLayoutData(gd); fAppearanceColorDefault.setVisible(false); fAppearanceColorDefault.addSelectionListener(colorDefaultSelectionListener); fAppearanceColorList.addSelectionListener(new SelectionListener() { @Override public void widgetDefaultSelected(SelectionEvent e) { // do nothing } @Override public void widgetSelected(SelectionEvent e) { handleAppearanceColorListSelection(); } }); foregroundColorButton.addSelectionListener(new SelectionListener() { @Override public void widgetDefaultSelected(SelectionEvent e) { // do nothing } @Override public void widgetSelected(SelectionEvent e) { int i = fAppearanceColorList.getSelectionIndex(); String key = fAppearanceColorListModel[i][1]; PreferenceConverter.setValue(fOverlayStore, key, fAppearanceColorEditor.getColorValue()); onAppearanceRelatedPreferenceChanged(); } }); fFontBoldCheckBox = addStyleCheckBox(stylesComposite, "Bold"); fFontItalicCheckBox = addStyleCheckBox(stylesComposite, "Italic"); } public void updateLabelExample(FormatStd formatStd, IPreferenceStore store) { if (labelExample != null && !labelExample.isDisposed()) { String str = "class Example(object):\n" + "\n" + " backquotes = `backquotes`\n" + "\n" + " @memoize(size=10)\n" + " def Call(self, param1=None):\n" + " u'''unicode'''\n" + " return param1 + 10 * 10\n" + "\n" + " def Call2(self):\n" + " b'''bytes'''\n" + " #Comment\n" + " return self.Call(param1=10)" + ""; Tuple<String, StyleRange[]> result = 
formatAndStyleRangeHelper.formatAndGetStyleRanges(formatStd, str, store, false); labelExample.setText(result.o1); labelExample.setStyleRanges(result.o2); } } @Override protected void onAppearanceRelatedPreferenceChanged() { localStore.setValue(KEYWORD_COLOR, fOverlayStore.getString(KEYWORD_COLOR)); localStore.setValue(SELF_COLOR, fOverlayStore.getString(SELF_COLOR)); localStore.setValue(CODE_COLOR, fOverlayStore.getString(CODE_COLOR)); localStore.setValue(DECORATOR_COLOR, fOverlayStore.getString(DECORATOR_COLOR)); localStore.setValue(NUMBER_COLOR, fOverlayStore.getString(NUMBER_COLOR)); localStore.setValue(FUNC_NAME_COLOR, fOverlayStore.getString(FUNC_NAME_COLOR)); localStore.setValue(CLASS_NAME_COLOR, fOverlayStore.getString(CLASS_NAME_COLOR)); localStore.setValue(STRING_COLOR, fOverlayStore.getString(STRING_COLOR)); localStore.setValue(UNICODE_COLOR, fOverlayStore.getString(UNICODE_COLOR)); localStore.setValue(COMMENT_COLOR, fOverlayStore.getString(COMMENT_COLOR)); localStore.setValue(BACKQUOTES_COLOR, fOverlayStore.getString(BACKQUOTES_COLOR)); localStore.setValue(PARENS_COLOR, fOverlayStore.getString(PARENS_COLOR)); localStore.setValue(OPERATORS_COLOR, fOverlayStore.getString(OPERATORS_COLOR)); localStore.setValue(DOCSTRING_MARKUP_COLOR, fOverlayStore.getString(DOCSTRING_MARKUP_COLOR)); localStore.setValue(KEYWORD_STYLE, fOverlayStore.getInt(KEYWORD_STYLE)); localStore.setValue(SELF_STYLE, fOverlayStore.getInt(SELF_STYLE)); localStore.setValue(CODE_STYLE, fOverlayStore.getInt(CODE_STYLE)); localStore.setValue(DECORATOR_STYLE, fOverlayStore.getInt(DECORATOR_STYLE)); localStore.setValue(NUMBER_STYLE, fOverlayStore.getInt(NUMBER_STYLE)); localStore.setValue(FUNC_NAME_STYLE, fOverlayStore.getInt(FUNC_NAME_STYLE)); localStore.setValue(CLASS_NAME_STYLE, fOverlayStore.getInt(CLASS_NAME_STYLE)); localStore.setValue(STRING_STYLE, fOverlayStore.getInt(STRING_STYLE)); localStore.setValue(UNICODE_STYLE, fOverlayStore.getInt(UNICODE_STYLE)); 
localStore.setValue(COMMENT_STYLE, fOverlayStore.getInt(COMMENT_STYLE)); localStore.setValue(BACKQUOTES_STYLE, fOverlayStore.getInt(BACKQUOTES_STYLE)); localStore.setValue(PARENS_STYLE, fOverlayStore.getInt(PARENS_STYLE)); localStore.setValue(OPERATORS_STYLE, fOverlayStore.getInt(OPERATORS_STYLE)); localStore.setValue(DOCSTRING_MARKUP_STYLE, fOverlayStore.getInt(DOCSTRING_MARKUP_STYLE)); this.updateLabelExample(PyFormatStd.getFormat(null), localStore); } @Override public void dispose() { super.dispose(); if (formatAndStyleRangeHelper != null) { formatAndStyleRangeHelper.dispose(); formatAndStyleRangeHelper = null; } if (updateLabelExampleOnPrefsChanges != null) { PydevPrefs.getChainedPrefStore().removePropertyChangeListener(updateLabelExampleOnPrefsChanges); updateLabelExampleOnPrefsChanges = null; } if (labelExample != null) { try { labelExample.dispose(); } catch (Exception e) { Log.log(e); } labelExample = null; } } public void setUpdateLabelExampleOnPrefsChanges() { updateLabelExampleOnPrefsChanges = new IPropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent event) { RunInUiThread.async(new Runnable() { @Override public void run() { updateLabelExample(PyFormatStd.getFormat(null), PydevPrefs.getChainedPrefStore()); } }); } }; PydevPrefs.getChainedPrefStore().addPropertyChangeListener(updateLabelExampleOnPrefsChanges); } }
package no.deichman.services.kohaadapter;

import static com.github.restdriver.clientdriver.ClientDriverRequest.Method.POST;
import com.github.restdriver.clientdriver.ClientDriverRequest.Method;
import com.github.restdriver.clientdriver.ClientDriverRule;
import static com.github.restdriver.clientdriver.RestClientDriver.giveResponse;
import static com.github.restdriver.clientdriver.RestClientDriver.onRequestTo;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import static javax.ws.rs.core.Response.Status.OK;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
import org.junit.Rule;
import org.junit.Test;

/**
 * Tests KohaAdapterImpl against a rest-client-driver mock of the Koha SVC API.
 */
public class KohaAdapterImplTest {
    // Fixed port for the local HTTP mock; the adapter under test is pointed at it.
    private static final int CLIENTDRIVER_PORT = 9210;
    private final KohaAdapterImpl kohaAdapter = new KohaAdapterImpl("http://localhost:" + CLIENTDRIVER_PORT);
    @Rule
    public final ClientDriverRule svcMock = new ClientDriverRule(CLIENTDRIVER_PORT);

    // Stubs Koha's authentication endpoint and hands back a session cookie ("huh")
    // that subsequent expectations match on.
    private void login(){
        String authenticationOKResponse = "<?xml version='1.0' standalone='yes'?>\n"
                +"<response>\n"
                +" <status>ok</status>\n"
                +"</response>";
        svcMock.addExpectation(
                onRequestTo("/cgi-bin/koha/svc/authentication")
                        .withMethod(POST)
                        .withBody("userid=admin&password=secret", MediaType.APPLICATION_FORM_URLENCODED_TYPE.toString()),
                giveResponse(authenticationOKResponse, "text/xml").withStatus(OK.getStatusCode())
                        .withHeader(HttpHeaders.SET_COOKIE, KohaAdapterImpl.SESSION_COOKIE_KEY + "=huh"));
    }

    @Test
    public void should_have_default_constructor() {
        assertNotNull(new KohaAdapterImpl());
    }

    @Test
    public void should_return_a_biblio() throws Exception {
        login();
        // The bib endpoint is stubbed with a canned MARCXML fixture from the classpath.
        svcMock.addExpectation(
                onRequestTo("/cgi-bin/koha/svc/bib/626460")
                        .withMethod(Method.GET)
                        .withParam("items", 1)
                        .withHeader(HttpHeaders.COOKIE, Pattern.compile(".*CGISESSID=huh.*")),
                giveResponse(IOUtils.toString(getClass().getClassLoader().getResourceAsStream("ragde.marcxml"), StandardCharsets.UTF_8), "application/xml"));
        assertNotNull(kohaAdapter.getBiblio("626460"));
    }

    @Test
    public void should_return_new_biblio_ID() throws Exception {
        login();
        // NOTE(review): the namespace URL string literals below are visibly truncated
        // right after "http:" — presumably everything from "//" onward was stripped by
        // whatever produced this copy of the file (they likely were the standard MARCXML
        // namespace URIs). As it stands this code cannot lex; reproduced verbatim —
        // restore from version control rather than guessing the exact URLs.
        String responseXml = "<?xml version='1.0' standalone='yes'?>\n"
                + "<response>\n"
                + " <biblionumber>26</biblionumber>\n"
                + " <marcxml>\n"
                + "<record\n" + " xmlns:xsi=\"http: + " xsi:schemaLocation=\"http: + " xmlns=\"http: + "\n"
                + " <leader>00049 a2200037 4500</leader>\n"
                + " <datafield tag=\"999\" ind1=\" \" ind2=\" \">\n"
                + " <subfield code=\"c\">26</subfield>\n"
                + " <subfield code=\"d\">26</subfield>\n"
                + " </datafield>\n"
                + "</record>\n"
                + "</marcxml>\n"
                + " <status>ok</status>\n"
                + "</response>\n";
        String expectedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                + "<marcxml:collection xmlns:marcxml=\"http: + "<marcxml:record><marcxml:leader>00000 2200000 </marcxml:leader>"
                + "</marcxml:record>"
                + "</marcxml:collection>\n";
        svcMock.addExpectation(
                onRequestTo("/cgi-bin/koha/svc/new_bib")
                        .withMethod(POST)
                        .withBody(expectedPayload, MediaType.TEXT_XML)
                        .withHeader(HttpHeaders.COOKIE, Pattern.compile(".*CGISESSID=huh.*")),
                giveResponse(responseXml, "text/xml; charset=ISO-8859-1")
                        .withStatus(OK.getStatusCode()));
        String biblioId = kohaAdapter.getNewBiblio();
        assertEquals("26",biblioId);
    }
}
package com.intellij.refactoring.introduceVariable;

import com.intellij.codeInsight.CodeInsightUtil;
import com.intellij.codeInsight.unwrap.ScopeHighlighter;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupAdapter;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Pass;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.IntroduceHandlerBase;
import com.intellij.refactoring.RefactoringActionHandler;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.introduceField.ElementToWorkOn;
import com.intellij.refactoring.ui.TypeSelectorManagerImpl;
import com.intellij.refactoring.util.*;
import com.intellij.refactoring.util.occurences.ExpressionOccurenceManager;
import com.intellij.refactoring.util.occurences.NotInSuperCallOccurenceFilter;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Base implementation of the "Introduce Variable" refactoring: given a selected
 * (or caret-inferred) expression, it creates a local variable declaration
 * initialized with that expression and replaces one or all occurrences with a
 * reference to the new variable. Subclasses supply the UI pieces
 * (settings dialog, error reporting, occurrence highlighting).
 */
public abstract class IntroduceVariableBase extends IntroduceHandlerBase implements RefactoringActionHandler {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.introduceVariable.IntroduceVariableBase");
  protected static String REFACTORING_NAME = RefactoringBundle.message("introduce.variable.title");

  /**
   * Entry point from the editor action. If there is no selection, tries to
   * infer a candidate expression at the caret: collects all non-trivial parent
   * expressions and either selects the single candidate, shows a chooser popup
   * for multiple candidates, or falls back to selecting the whole line.
   */
  public void invoke(@NotNull final Project project, final Editor editor, final PsiFile file, DataContext dataContext) {
    if (!editor.getSelectionModel().hasSelection()) {
      final int offset = editor.getCaretModel().getOffset();
      final PsiElement[] statementsInRange = findStatementsAtOffset(editor, file, offset);
      // A single statement with a syntax error: select the line so the user sees what was picked up.
      if (statementsInRange.length == 1 && PsiUtil.hasErrorElementChild(statementsInRange[0])) {
        editor.getSelectionModel().selectLineAtCaret();
      } else {
        final PsiElement elementAtCaret = file.findElementAt(offset);
        final List<PsiExpression> expressions = new ArrayList<PsiExpression>();
        PsiExpression expression = PsiTreeUtil.getParentOfType(elementAtCaret, PsiExpression.class);
        // Walk up the PSI tree collecting every enclosing expression worth extracting
        // (skips bare references, parentheses, 'super', and void-typed expressions).
        while (expression != null) {
          if (!(expression instanceof PsiReferenceExpression) && !(expression instanceof PsiParenthesizedExpression) && !(expression instanceof PsiSuperExpression) && expression.getType() != PsiType.VOID) {
            expressions.add(expression);
          }
          expression = PsiTreeUtil.getParentOfType(expression, PsiExpression.class);
        }
        if (expressions.isEmpty()) {
          editor.getSelectionModel().selectLineAtCaret();
        } else if (expressions.size() == 1) {
          final TextRange textRange = expressions.get(0).getTextRange();
          editor.getSelectionModel().setSelection(textRange.getStartOffset(), textRange.getEndOffset());
        } else {
          // Multiple candidates: let the user choose, then re-enter with the chosen range.
          showChooser(editor, expressions, new Pass<PsiExpression>(){
            public void pass(final PsiExpression selectedValue) {
              invoke(project, editor, file, selectedValue.getTextRange().getStartOffset(), selectedValue.getTextRange().getEndOffset());
            }
          });
          return;
        }
      }
    }
    if (invoke(project, editor, file, editor.getSelectionModel().getSelectionStart(), editor.getSelectionModel().getSelectionEnd())) {
      editor.getSelectionModel().removeSelection();
    }
  }

  /** Returns the statements found on the whole editor line containing {@code offset}. */
  public static PsiElement[] findStatementsAtOffset(final Editor editor, final PsiFile file, final int offset) {
    final Document document = editor.getDocument();
    final int lineNumber = document.getLineNumber(offset);
    final int lineStart = document.getLineStartOffset(lineNumber);
    final int lineEnd = document.getLineEndOffset(lineNumber);
    return CodeInsightUtil.findStatementsInRange(file, lineStart, lineEnd);
  }

  /**
   * Shows a popup listing candidate expressions; highlights the hovered one in
   * the editor and passes the chosen expression to {@code callback}.
   */
  public static void showChooser(final Editor editor, final List<PsiExpression> expressions, final Pass<PsiExpression> callback) {
    final ScopeHighlighter highlighter = new ScopeHighlighter(editor);
    final DefaultListModel model = new DefaultListModel();
    for (PsiExpression expr : expressions) {
      model.addElement(expr);
    }
    final JList list = new JList(model);
    list.setCellRenderer(new DefaultListCellRenderer() {
      // Builds an abbreviated, human-readable rendering of an expression for the popup list.
      void appendText(PsiExpression expr, StringBuffer buf) {
        if (expr instanceof PsiNewExpression) {
          final PsiAnonymousClass anonymousClass = ((PsiNewExpression)expr).getAnonymousClass();
          if (anonymousClass != null) {
            buf.append("new ").append(anonymousClass.getBaseClassType().getPresentableText()).append("(...) {...}");
          } else {
            buf.append(expr.getText());
          }
        } else if (expr instanceof PsiReferenceExpression) {
          final PsiExpression qualifierExpression = ((PsiReferenceExpression)expr).getQualifierExpression();
          if (qualifierExpression != null) {
            appendText(qualifierExpression, buf);
            buf.append(".");
          }
          buf.append(((PsiReferenceExpression)expr).getReferenceName());
        } else if (expr instanceof PsiMethodCallExpression) {
          appendText(((PsiMethodCallExpression)expr).getMethodExpression(), buf);
          final PsiExpression[] args = ((PsiMethodCallExpression)expr).getArgumentList().getExpressions();
          if (args.length > 0) {
            buf.append("(...)");
          } else {
            buf.append("()");
          }
        } else {
          buf.append(expr.getText());
        }
      }

      @Override
      public Component getListCellRendererComponent(final JList list, final Object value, final int index, final boolean isSelected, final boolean cellHasFocus) {
        final Component rendererComponent = super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
        final StringBuffer buf = new StringBuffer();
        appendText((PsiExpression)value, buf);
        setText(buf.toString());
        return rendererComponent;
      }
    });
    list.addListSelectionListener(new ListSelectionListener() {
      public void valueChanged(final ListSelectionEvent e) {
        // Re-highlight the currently selected candidate in the editor.
        highlighter.dropHighlight();
        final int index = list.getSelectedIndex();
        if (index < 0 ) return;
        final PsiExpression expr = (PsiExpression)model.get(index);
        final ArrayList<PsiElement> toExtract = new ArrayList<PsiElement>();
        toExtract.add(expr);
        highlighter.highlight(expr, toExtract);
      }
    });
    JBPopupFactory.getInstance().createListPopupBuilder(list)
      .setTitle("Expressions")
      .setMovable(false)
      .setResizable(false)
      .setRequestFocus(true)
      .setItemChoosenCallback(new Runnable() {
        public void run() {
          callback.pass((PsiExpression)list.getSelectedValue());
        }
      })
      .addListener(new JBPopupAdapter() {
        @Override
        public void onClosed(JBPopup popup) {
          highlighter.dropHighlight();
        }
      })
      .createPopup().showInBestPositionFor(editor);
  }

  /**
   * Resolves the text range [startOffset, endOffset) to a PsiExpression
   * (a direct expression, a single expression statement, or a synthetic
   * expression built from the selected text) and runs the refactoring on it.
   */
  private boolean invoke(final Project project, final Editor editor, PsiFile file, int startOffset, int endOffset) {
    FeatureUsageTracker.getInstance().triggerFeatureUsed("refactoring.introduceVariable");
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    PsiExpression tempExpr = CodeInsightUtil.findExpressionInRange(file, startOffset, endOffset);
    if (tempExpr == null) {
      PsiElement[] statements = CodeInsightUtil.findStatementsInRange(file, startOffset, endOffset);
      if (statements.length == 1 && statements[0] instanceof PsiExpressionStatement) {
        tempExpr = ((PsiExpressionStatement) statements[0]).getExpression();
      }
    }
    if (tempExpr == null) {
      tempExpr = getSelectedExpression(project, file, startOffset, endOffset);
    }
    return invokeImpl(project, tempExpr, editor);
  }

  /**
   * Builds a non-physical PsiExpression from the raw selected text. When the
   * selection lies inside a string literal, the selected fragment is extracted
   * as its own literal and PREFIX/SUFFIX user data record the quote/concat
   * glue needed to stitch the surrounding literal back together later in
   * {@link #replace}. Returns null if the text does not parse as an expression.
   */
  public static PsiExpression getSelectedExpression(final Project project, final PsiFile file, final int startOffset, final int endOffset) {
    PsiExpression tempExpr;
    final PsiElement elementAt = PsiTreeUtil.findCommonParent(file.findElementAt(startOffset), file.findElementAt(endOffset - 1));
    final PsiLiteralExpression literalExpression = PsiTreeUtil.getParentOfType(elementAt, PsiLiteralExpression.class);
    final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(project).getElementFactory();
    try {
      String text = file.getText().subSequence(startOffset, endOffset).toString();
      String prefix = null;
      String suffix = null;
      if (literalExpression != null) {
        final String stripped = StringUtil.stripQuotesAroundValue(text);
        boolean primitive = false;
        // Selected fragment that is a boolean or integer literal stays unquoted;
        // anything else becomes a string literal.
        if (stripped.equals("true") || stripped.equals("false")) {
          primitive = true;
        }
        else {
          try {
            Integer.parseInt(stripped);
            primitive = true;
          } catch (NumberFormatException e1) {
            //then not primitive
          }
        }
        text = primitive ? stripped : ("\"" + stripped + "\"");
        final int offset = literalExpression.getTextOffset();
        // Selection starts strictly inside the literal: need a closing-quote-plus-concat prefix.
        if (offset + 1 < startOffset) {
          prefix = "\" + ";
        }
        // Selection ends strictly inside the literal: need a concat-plus-opening-quote suffix.
        if (offset + literalExpression.getTextLength() - 1 > endOffset) {
          suffix = " + \"";
        }
      }
      else {
        text = text.trim();
      }
      tempExpr = elementFactory.createExpressionFromText(text, file);
      tempExpr.putUserData(ElementToWorkOn.PREFIX, prefix);
      tempExpr.putUserData(ElementToWorkOn.SUFFIX, suffix);
      tempExpr.putUserData(ElementToWorkOn.TEXT_RANGE, FileDocumentManager.getInstance().getDocument(file.getVirtualFile()).createRangeMarker(startOffset, endOffset));
      tempExpr.putUserData(ElementToWorkOn.PARENT, literalExpression != null ? literalExpression : elementAt);
    }
    catch (IncorrectOperationException e) {
      tempExpr = null;
    }
    return tempExpr;
  }

  /**
   * Core of the refactoring: validates the expression and its context, asks
   * the subclass for settings, then performs the PSI mutation inside a write
   * command. Returns true if the refactoring ran (even if the user later sees
   * no change), false if it was rejected or cancelled.
   */
  protected boolean invokeImpl(final Project project, final PsiExpression expr, final Editor editor) {
    if (expr != null && expr.getParent() instanceof PsiExpressionStatement) {
      FeatureUsageTracker.getInstance().triggerFeatureUsed("refactoring.introduceVariable.incompleteStatement");
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("expression:" + expr);
    }
    if (expr == null) {
      String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("selected.block.should.represent.an.expression"));
      showErrorMessage(project, editor, message);
      return false;
    }
    final PsiElementFactory factory = JavaPsiFacade.getInstance(project).getElementFactory();
    PsiType originalType = RefactoringUtil.getTypeByExpressionWithExpectedType(expr);
    if (originalType == null) {
      String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("unknown.expression.type"));
      showErrorMessage(project, editor, message);
      return false;
    }
    if(originalType == PsiType.VOID) {
      String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("selected.expression.has.void.type"));
      showErrorMessage(project, editor, message);
      return false;
    }
    // For a synthetic expression (string-literal fragment) the anchor is the literal's parent.
    final PsiElement physicalElement = expr.getUserData(ElementToWorkOn.PARENT);
    PsiElement anchorStatement = RefactoringUtil.getParentStatement(physicalElement != null ? physicalElement : expr, false);
    if (anchorStatement == null) {
      return parentStatementNotFound(project, editor);
    }
    if (anchorStatement instanceof PsiExpressionStatement) {
      PsiExpression enclosingExpr = ((PsiExpressionStatement)anchorStatement).getExpression();
      if (enclosingExpr instanceof PsiMethodCallExpression) {
        PsiMethod method = ((PsiMethodCallExpression)enclosingExpr).resolveMethod();
        if (method != null && method.isConstructor()) {
          //This is either 'this' or 'super', both must be the first in the respective constructor
          String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("invalid.expression.context"));
          showErrorMessage(project, editor, message);
          return false;
        }
      }
    }
    PsiElement tempContainer = anchorStatement.getParent();
    if (!(tempContainer instanceof PsiCodeBlock) && !isLoopOrIf(tempContainer)) {
      String message = RefactoringBundle.message("refactoring.is.not.supported.in.the.current.context", REFACTORING_NAME);
      showErrorMessage(project, editor, message);
      return false;
    }
    if(!NotInSuperCallOccurenceFilter.INSTANCE.isOK(expr)) {
      String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("cannot.introduce.variable.in.super.constructor.call"));
      showErrorMessage(project, editor, message);
      return false;
    }
    final PsiFile file = anchorStatement.getContainingFile();
    LOG.assertTrue(file != null, "expr.getContainingFile() == null");
    if (!CommonRefactoringUtil.checkReadOnlyStatus(project, file)) return false;
    // Find the outermost code block inside the enclosing method/file: the scope
    // in which other occurrences of the expression are searched.
    PsiElement containerParent = tempContainer;
    PsiElement lastScope = tempContainer;
    while (true) {
      if (containerParent instanceof PsiFile) break;
      if (containerParent instanceof PsiMethod) break;
      containerParent = containerParent.getParent();
      if (containerParent instanceof PsiCodeBlock) {
        lastScope = containerParent;
      }
    }
    ExpressionOccurenceManager occurenceManager = new ExpressionOccurenceManager(expr, lastScope, NotInSuperCallOccurenceFilter.INSTANCE);
    final PsiExpression[] occurrences = occurenceManager.getOccurences();
    final PsiElement anchorStatementIfAll = occurenceManager.getAnchorStatementForAll();
    boolean declareFinalIfAll = occurenceManager.isInFinalContext();
    boolean anyAssignmentLHS = false;
    for (PsiExpression occurrence : occurrences) {
      if (RefactoringUtil.isAssignmentLHS(occurrence)) {
        anyAssignmentLHS = true;
        break;
      }
    }
    // Subclass-provided UI: variable name, type, replace-all, final, etc.
    IntroduceVariableSettings settings = getSettings(project, editor, expr, occurrences, anyAssignmentLHS, declareFinalIfAll, originalType,
                                                    new TypeSelectorManagerImpl(project, originalType, expr, occurrences),
                                                    new InputValidator(this, project, anchorStatementIfAll, anchorStatement, occurenceManager));
    if (!settings.isOK()) {
      return false;
    }
    final String variableName = settings.getEnteredName();
    final PsiType type = settings.getSelectedType();
    final boolean replaceAll = settings.isReplaceAllOccurrences();
    final boolean replaceWrite = settings.isReplaceLValues();
    final boolean declareFinal = replaceAll && declareFinalIfAll || settings.isDeclareFinal();
    if (replaceAll) {
      // When replacing all occurrences the declaration must dominate them all.
      anchorStatement = anchorStatementIfAll;
      tempContainer = anchorStatement.getParent();
    }
    final PsiElement container = tempContainer;
    PsiElement child = anchorStatement;
    if (!isLoopOrIf(container)) {
      child = locateAnchor(child);
    }
    final PsiElement anchor = child == null ? anchorStatement : child;
    boolean tempDeleteSelf = false;
    final boolean replaceSelf = replaceWrite || !RefactoringUtil.isAssignmentLHS(expr);
    // Decide whether the original expression statement should be deleted after
    // the declaration takes over its expression as initializer.
    if (!isLoopOrIf(container)) {
      if (expr.getParent() instanceof PsiExpressionStatement && anchor.equals(anchorStatement)) {
        PsiStatement statement = (PsiStatement) expr.getParent();
        PsiElement parent = statement.getParent();
        if (parent instanceof PsiCodeBlock || //fabrique
            parent instanceof PsiCodeFragment) {
          tempDeleteSelf = true;
        }
      }
      tempDeleteSelf &= replaceSelf;
    }
    final boolean deleteSelf = tempDeleteSelf;
    // Remember the caret position so it can be restored after the PSI change.
    final int col = editor != null ? editor.getCaretModel().getLogicalPosition().column : 0;
    final int line = editor != null ? editor.getCaretModel().getLogicalPosition().line : 0;
    if (deleteSelf) {
      if (editor != null) {
        LogicalPosition pos = new LogicalPosition(line, col);
        editor.getCaretModel().moveToLogicalPosition(pos);
      }
    }
    final PsiCodeBlock newDeclarationScope = PsiTreeUtil.getParentOfType(container, PsiCodeBlock.class, false);
    final FieldConflictsResolver fieldConflictsResolver = new FieldConflictsResolver(variableName, newDeclarationScope);
    final PsiElement finalAnchorStatement = anchorStatement;
    final Runnable runnable = new Runnable() {
      public void run() {
        try {
          PsiStatement statement = null;
          final boolean isInsideLoop = isLoopOrIf(container);
          if (!isInsideLoop && deleteSelf) {
            statement = (PsiStatement) expr.getParent();
          }
          final PsiExpression expr1 = fieldConflictsResolver.fixInitializer(expr);
          PsiExpression initializer = RefactoringUtil.unparenthesizeExpression(expr1);
          // `new T[]{...}` initializer can be shortened to just the array initializer.
          if (expr1 instanceof PsiNewExpression) {
            final PsiNewExpression newExpression = (PsiNewExpression)expr1;
            if (newExpression.getArrayInitializer() != null) {
              initializer = newExpression.getArrayInitializer();
            }
          }
          PsiDeclarationStatement declaration = factory.createVariableDeclarationStatement(variableName, type, initializer);
          if (!isInsideLoop) {
            declaration = (PsiDeclarationStatement) container.addBefore(declaration, anchor);
            LOG.assertTrue(expr1.isValid());
            if (deleteSelf) { // never true
              final PsiElement lastChild = statement.getLastChild();
              if (lastChild instanceof PsiComment) { // keep trailing comment
                declaration.addBefore(lastChild, null);
              }
              statement.delete();
              if (editor != null) {
                LogicalPosition pos = new LogicalPosition(line, col);
                editor.getCaretModel().moveToLogicalPosition(pos);
                editor.getCaretModel().moveToOffset(declaration.getTextRange().getEndOffset());
                editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
                editor.getSelectionModel().removeSelection();
              }
            }
          }
          PsiExpression ref = factory.createExpressionFromText(variableName, null);
          if (replaceAll) {
            ArrayList<PsiElement> array = new ArrayList<PsiElement>();
            for (PsiExpression occurrence : occurrences) {
              if (deleteSelf && occurrence.equals(expr)) continue;
              if (occurrence.equals(expr)) {
                occurrence = expr1;
              }
              if (occurrence != null) {
                occurrence = RefactoringUtil.outermostParenthesizedExpression(occurrence);
              }
              if (replaceWrite || !RefactoringUtil.isAssignmentLHS(occurrence)) {
                array.add(occurrence.replace(ref));
              }
            }
            if (editor != null) {
              final PsiElement[] replacedOccurences = array.toArray(new PsiElement[array.size()]);
              highlightReplacedOccurences(project, editor, replacedOccurences);
            }
          } else {
            if (!deleteSelf && replaceSelf) {
              replace(expr1, ref, file);
            }
          }
          // If the container is a loop/if with a non-block body, wrap the body
          // in a block and move the declaration inside it.
          declaration = (PsiDeclarationStatement) putStatementInLoopBody(declaration, container, finalAnchorStatement);
          PsiVariable var = (PsiVariable) declaration.getDeclaredElements()[0];
          var.getModifierList().setModifierProperty(PsiModifier.FINAL, declareFinal);
          fieldConflictsResolver.fix();
        } catch (IncorrectOperationException e) {
          LOG.error(e);
        }
      }
    };
    CommandProcessor.getInstance().executeCommand(
      project,
      new Runnable() {
        public void run() {
          ApplicationManager.getApplication().runWriteAction(runnable);
        }
      }, REFACTORING_NAME, null);
    return true;
  }

  /**
   * Replaces {@code expr1} with {@code ref}. Physical expressions are replaced
   * directly; synthetic string-literal fragments are stitched back into the
   * surrounding literal using the recorded PREFIX/SUFFIX glue and text range.
   */
  public static PsiElement replace(final PsiExpression expr1, final PsiExpression ref, final PsiFile file) throws IncorrectOperationException {
    final PsiExpression expr2 = RefactoringUtil.outermostParenthesizedExpression(expr1);
    if (expr2.isPhysical()) {
      return expr2.replace(ref);
    }
    else {
      final String prefix = expr1.getUserData(ElementToWorkOn.PREFIX);
      final String suffix = expr1.getUserData(ElementToWorkOn.SUFFIX);
      final PsiElement parent = expr1.getUserData(ElementToWorkOn.PARENT);
      final RangeMarker rangeMarker = expr1.getUserData(ElementToWorkOn.TEXT_RANGE);
      final String allText = parent.getContainingFile().getText();
      final TextRange parentRange = parent.getTextRange();
      String beg = allText.substring(parentRange.getStartOffset(), rangeMarker.getStartOffset());
      // Drop leading/trailing remnants that are nothing but quotes.
      if (StringUtil.stripQuotesAroundValue(beg).length() == 0) beg = "";
      String end = allText.substring(rangeMarker.getEndOffset(), parentRange.getEndOffset());
      if (StringUtil.stripQuotesAroundValue(end).length() == 0) end = "";
      final String text = beg + (prefix != null ? prefix : "") + ref.getText() + (suffix != null ? suffix : "") + end;
      final PsiExpression el = JavaPsiFacade.getInstance(file.getProject()).getElementFactory().createExpressionFromText(text, file);
      return parent.replace(el);
    }
  }

  /**
   * When the container is a loop or if-statement, wraps its (then/else/loop)
   * body in a new block, puts the declaration first, and re-adds a copy of the
   * original body after it. Otherwise returns the declaration unchanged.
   */
  public static PsiStatement putStatementInLoopBody(PsiStatement declaration, PsiElement container, PsiElement finalAnchorStatement)
    throws IncorrectOperationException {
    if(isLoopOrIf(container)) {
      PsiStatement loopBody = getLoopBody(container, finalAnchorStatement);
      PsiStatement loopBodyCopy = loopBody != null ? (PsiStatement) loopBody.copy() : null;
      PsiBlockStatement blockStatement = (PsiBlockStatement)JavaPsiFacade.getInstance(container.getProject()).getElementFactory()
        .createStatementFromText("{}", null);
      blockStatement = (PsiBlockStatement) CodeStyleManager.getInstance(container.getProject()).reformat(blockStatement);
      final PsiElement prevSibling = loopBody.getPrevSibling();
      // Remove the whitespace before the old body unless it follows an
      // end-of-line comment (deleting it there would glue the comment to code).
      if(prevSibling instanceof PsiWhiteSpace) {
        final PsiElement pprev = prevSibling.getPrevSibling();
        if (!(pprev instanceof PsiComment) || !((PsiComment)pprev).getTokenType().equals(JavaTokenType.END_OF_LINE_COMMENT)) {
          prevSibling.delete();
        }
      }
      blockStatement = (PsiBlockStatement) loopBody.replace(blockStatement);
      final PsiCodeBlock codeBlock = blockStatement.getCodeBlock();
      declaration = (PsiStatement) codeBlock.add(declaration);
      JavaCodeStyleManager.getInstance(declaration.getProject()).shortenClassReferences(declaration);
      if (loopBodyCopy != null) codeBlock.add(loopBodyCopy);
    }
    return declaration;
  }

  // Reports "not supported in current context" and returns false for the caller to propagate.
  private boolean parentStatementNotFound(final Project project, Editor editor) {
    String message = RefactoringBundle.message("refactoring.is.not.supported.in.the.current.context", REFACTORING_NAME);
    showErrorMessage(project, editor, message);
    return false;
  }

  // Introduce-variable never operates on an existing local variable.
  protected boolean invokeImpl(Project project, PsiLocalVariable localVariable, Editor editor) {
    throw new UnsupportedOperationException();
  }

  /**
   * Walks backwards from {@code child} to the first position after a preceding
   * statement or opening brace, then forwards past whitespace/comments —
   * yielding the element before which the declaration should be inserted.
   */
  private static PsiElement locateAnchor(PsiElement child) {
    while (child != null) {
      PsiElement prev = child.getPrevSibling();
      if (prev instanceof PsiStatement) break;
      if (prev instanceof PsiJavaToken && ((PsiJavaToken)prev).getTokenType() == JavaTokenType.LBRACE) break;
      child = prev;
    }
    while (child instanceof PsiWhiteSpace || child instanceof PsiComment) {
      child = child.getNextSibling();
    }
    return child;
  }

  /** Subclass hook: highlight the occurrences that were just replaced. */
  protected abstract void highlightReplacedOccurences(Project project, Editor editor, PsiElement[] replacedOccurences);

  /** Subclass hook: collect the refactoring settings (dialog or in-place). */
  protected abstract IntroduceVariableSettings getSettings(Project project, Editor editor, PsiExpression expr,
                                                           final PsiElement[] occurrences, boolean anyAssignmentLHS,
                                                           final boolean declareFinalIfAll, final PsiType type,
                                                           TypeSelectorManagerImpl typeSelectorManager, InputValidator validator);

  /** Subclass hook: show a "cannot perform refactoring" message. */
  protected abstract void showErrorMessage(Project project, Editor editor, String message);

  /**
   * Returns the body of a loop, or the then/else branch of an if-statement
   * that contains {@code anchorStatement}. Logs an assertion failure (and
   * returns null) if the container is neither.
   */
  @Nullable
  private static PsiStatement getLoopBody(PsiElement container, PsiElement anchorStatement) {
    if(container instanceof PsiLoopStatement) {
      return ((PsiLoopStatement) container).getBody();
    }
    else if (container instanceof PsiIfStatement) {
      final PsiStatement thenBranch = ((PsiIfStatement)container).getThenBranch();
      if (thenBranch != null && PsiTreeUtil.isAncestor(thenBranch, anchorStatement, false)) {
        return thenBranch;
      }
      final PsiStatement elseBranch = ((PsiIfStatement)container).getElseBranch();
      if (elseBranch != null && PsiTreeUtil.isAncestor(elseBranch, anchorStatement, false)) {
        return elseBranch;
      }
      LOG.assertTrue(false);
    }
    LOG.assertTrue(false);
    return null;
  }

  /** True if {@code element} is a loop or if-statement (containers that get a wrapped body). */
  public static boolean isLoopOrIf(PsiElement element) {
    return element instanceof PsiLoopStatement || element instanceof PsiIfStatement;
  }

  public interface Validator {
    boolean isOK(IntroduceVariableSettings dialog);
  }

  /** Subclass hook: present conflicts to the user; true to proceed anyway. */
  protected abstract boolean reportConflicts(ArrayList<String> conflicts, final Project project);

  /**
   * Warns when the occurrence lives in a loop condition whose referenced
   * variables are modified in the loop body: extracting it would freeze the
   * value and may change the loop's behavior.
   */
  public static void checkInLoopCondition(PsiExpression occurence, List<String> conflicts) {
    final PsiElement loopForLoopCondition = RefactoringUtil.getLoopForLoopCondition(occurence);
    if (loopForLoopCondition == null) return;
    final List<PsiVariable> referencedVariables = RefactoringUtil.collectReferencedVariables(occurence);
    final List<PsiVariable> modifiedInBody = new ArrayList<PsiVariable>();
    for (PsiVariable psiVariable : referencedVariables) {
      if (RefactoringUtil.isModifiedInScope(psiVariable, loopForLoopCondition)) {
        modifiedInBody.add(psiVariable);
      }
    }
    if (!modifiedInBody.isEmpty()) {
      for (PsiVariable variable : modifiedInBody) {
        final String message = RefactoringBundle.message("is.modified.in.loop.body", RefactoringUIUtil.getDescription(variable, false));
        conflicts.add(ConflictsUtil.capitalize(message));
      }
      conflicts.add(RefactoringBundle.message("introducing.variable.may.break.code.logic"));
    }
  }
}
package org.apache.lucene.misc;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Filter;

import java.io.IOException;
import java.util.BitSet;

/**
 * <p>
 * A {@link Filter} that chains several filters together, combining their bit
 * sets with logical operations (<b>OR</b>, <b>AND</b>, <b>ANDNOT</b>,
 * <b>XOR</b>). One operation may apply to the whole chain, or a separate
 * operation may be declared for each filter.
 * </p>
 * <p>
 * Filters are evaluated in chain order, so it is usually more efficient to put
 * the most restrictive / least expensive filters first.
 * </p>
 *
 * @author <a href="mailto:kelvint@apache.org">Kelvin Tan</a>
 */
public class ChainedFilter extends Filter {

    /** {@link BitSet#or}. */
    public static final int OR = 0;

    /** {@link BitSet#and}. */
    public static final int AND = 1;

    /** {@link BitSet#andNot}. */
    public static final int ANDNOT = 2;

    /** {@link BitSet#xor}. */
    public static final int XOR = 3;

    /**
     * Logical operation used when none is declared. Defaults to
     * {@link BitSet#or}.
     */
    public static int DEFAULT = OR;

    /** The filter chain. */
    private Filter[] chain = null;

    /** Per-filter operations, or null when a single operation is in effect. */
    private int[] logicArray;

    /** Chain-wide operation, or -1 when unset. */
    private int logic = -1;

    /**
     * Ctor.
     * @param chain The chain of filters
     */
    public ChainedFilter(Filter[] chain) {
        this.chain = chain;
    }

    /**
     * Ctor.
     * @param chain The chain of filters
     * @param logicArray Logical operations to apply between filters
     */
    public ChainedFilter(Filter[] chain, int[] logicArray) {
        this.chain = chain;
        this.logicArray = logicArray;
    }

    /**
     * Ctor.
     * @param chain The chain of filters
     * @param logic Logical operation to apply to ALL filters
     */
    public ChainedFilter(Filter[] chain, int logic) {
        this.chain = chain;
        this.logic = logic;
    }

    /**
     * {@link Filter#bits}. Dispatches on how the operations were declared:
     * a single chain-wide operation, a per-filter array, or the default.
     */
    public BitSet bits(IndexReader reader) throws IOException {
        if (logic != -1) {
            return bits(reader, logic);
        }
        if (logicArray != null) {
            return bits(reader, logicArray);
        }
        return bits(reader, DEFAULT);
    }

    /**
     * Applies one logical operation across every filter in the chain.
     *
     * @param reader IndexReader
     * @param logic  Logical operation
     * @return the combined BitSet
     */
    private BitSet bits(IndexReader reader, int logic) throws IOException {
        // ANDing into an all-false set would always produce zero results, so
        // seed the accumulator with the first filter's bits instead. (Thanks
        // to Daniel Armbrust for pointing this out and suggesting the fix.)
        BitSet accumulated;
        int start;
        if (logic == AND) {
            accumulated = chain[0].bits(reader);
            start = 1;
        } else {
            accumulated = new BitSet(reader.maxDoc());
            start = 0;
        }
        for (int pos = start; pos < chain.length; pos++) {
            doChain(accumulated, reader, logic, chain[pos]);
        }
        return accumulated;
    }

    /**
     * Applies a per-filter logical operation across the chain.
     *
     * @param reader IndexReader
     * @param logic  one operation per filter; must match the chain's length
     * @return the combined BitSet
     */
    private BitSet bits(IndexReader reader, int[] logic) throws IOException {
        if (logic.length != chain.length) {
            throw new IllegalArgumentException("Invalid number of elements in logic array");
        }
        // Same first-AND seeding as the single-operation variant above.
        BitSet accumulated;
        int start;
        if (logic[0] == AND) {
            accumulated = chain[0].bits(reader);
            start = 1;
        } else {
            accumulated = new BitSet(reader.maxDoc());
            start = 0;
        }
        for (int pos = start; pos < chain.length; pos++) {
            doChain(accumulated, reader, logic[pos], chain[pos]);
        }
        return accumulated;
    }

    public String toString() {
        StringBuilder out = new StringBuilder();
        out.append("ChainedFilter: [");
        for (int pos = 0; pos < chain.length; pos++) {
            out.append(chain[pos]).append(' ');
        }
        return out.append(']').toString();
    }

    /** Folds one filter's bits into {@code result} using the given operation. */
    private void doChain(BitSet result, IndexReader reader, int logic, Filter filter) throws IOException {
        if (logic == OR) {
            result.or(filter.bits(reader));
        } else if (logic == AND) {
            result.and(filter.bits(reader));
        } else if (logic == ANDNOT) {
            result.andNot(filter.bits(reader));
        } else if (logic == XOR) {
            result.xor(filter.bits(reader));
        } else {
            // Unknown operation: fall back to the chain-wide default.
            doChain(result, reader, DEFAULT, filter);
        }
    }
}
package com.seaglasslookandfeel.demo;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;

/**
 * Manual demo exercising Sea Glass scroll bars: a 500x500 white panel is
 * placed inside a scroll pane in a deliberately smaller frame so that both
 * the horizontal and vertical scroll bars are shown.
 */
public class TestScrollBars {

    public static void main(String[] args) {
        // Fix: the original wrapped this block in a pointless `if (true)` —
        // the dead conditional is removed; behavior is unchanged.
        try {
            UIManager.setLookAndFeel("com.seaglasslookandfeel.SeaGlassLookAndFeel");
        } catch (Exception e) {
            // Best-effort: if Sea Glass is missing, the demo still runs
            // with the default look-and-feel.
            e.printStackTrace();
        }

        // All Swing construction happens on the Event Dispatch Thread.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                JPanel panel = new JPanel();
                // Larger than the frame below, forcing both scroll bars to appear.
                panel.setPreferredSize(new Dimension(500, 500));
                panel.setBackground(Color.WHITE);

                JScrollPane scrollPane = new JScrollPane(panel);

                JFrame frame = new JFrame();
                frame.add(scrollPane, BorderLayout.CENTER);
                frame.setSize(275, 125);
                frame.setLocationRelativeTo(null); // center on screen
                frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
                frame.setVisible(true);
            }
        });
    }
}
package gov.nih.nci.cananolab.service.particle.helper; import gov.nih.nci.cananolab.domain.common.DerivedBioAssayData; import gov.nih.nci.cananolab.domain.common.Keyword; import gov.nih.nci.cananolab.domain.common.LabFile; import gov.nih.nci.cananolab.domain.particle.NanoparticleSample; import gov.nih.nci.cananolab.domain.particle.characterization.Characterization; import gov.nih.nci.cananolab.domain.particle.samplecomposition.Function; import gov.nih.nci.cananolab.domain.particle.samplecomposition.OtherFunction; import gov.nih.nci.cananolab.domain.particle.samplecomposition.base.ComposingElement; import gov.nih.nci.cananolab.domain.particle.samplecomposition.base.NanoparticleEntity; import gov.nih.nci.cananolab.domain.particle.samplecomposition.base.OtherNanoparticleEntity; import gov.nih.nci.cananolab.domain.particle.samplecomposition.functionalization.FunctionalizingEntity; import gov.nih.nci.cananolab.domain.particle.samplecomposition.functionalization.OtherFunctionalizingEntity; import gov.nih.nci.cananolab.service.common.helper.FileServiceHelper; import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService; import gov.nih.nci.cananolab.util.CaNanoLabConstants; import gov.nih.nci.cananolab.util.ClassUtils; import gov.nih.nci.cananolab.util.StringUtils; import gov.nih.nci.cananolab.util.TextMatchMode; import gov.nih.nci.system.client.ApplicationServiceProvider; import gov.nih.nci.system.query.hibernate.HQLCriteria; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import org.hibernate.FetchMode; import org.hibernate.criterion.CriteriaSpecification; import org.hibernate.criterion.Criterion; import org.hibernate.criterion.DetachedCriteria; import org.hibernate.criterion.Disjunction; import org.hibernate.criterion.MatchMode; import org.hibernate.criterion.Property; import org.hibernate.criterion.Restrictions; /** * Helper class providing 
 implementations of search methods needed for both
 * local implementation of NanoparticleSampleService and grid service
 *
 * @author pansu, tanq
 */
public class NanoparticleSampleServiceHelper {

	/**
	 * Searches for nanoparticle samples matching any combination of the given
	 * criteria. All array criteria are optional (null/empty means "don't
	 * filter on this"). Functionalizing-entity criteria are applied in memory
	 * afterwards (see filterByFunctionalizingEntities) because
	 * table-per-subclass inheritance prevents using funcEntity.class in the
	 * where clause.
	 *
	 * @param particleSource partial organization name (wildcard match)
	 * @param nanoparticleEntityClassNames entity subclass names to match
	 * @param otherNanoparticleTypes free-text types of OtherNanoparticleEntity
	 * @param functionalizingEntityClassNames applied as a post-query filter
	 * @param otherFunctionalizingEntityTypes applied as a post-query filter
	 * @param functionClassNames function subclass names (inherent or attached)
	 * @param otherFunctionTypes free-text types of OtherFunction
	 * @param characterizationClassNames characterization subclass names
	 * @param wordList keywords/description words (keywords matched upper-case)
	 * @return matching samples, distinct by root entity
	 */
	public List<NanoparticleSample> findNanoparticleSamplesBy(
			String particleSource, String[] nanoparticleEntityClassNames,
			String[] otherNanoparticleTypes,
			String[] functionalizingEntityClassNames,
			String[] otherFunctionalizingEntityTypes,
			String[] functionClassNames, String[] otherFunctionTypes,
			String[] characterizationClassNames, String[] wordList)
			throws Exception {
		List<NanoparticleSample> particles = new ArrayList<NanoparticleSample>();
		DetachedCriteria crit = DetachedCriteria
				.forClass(NanoparticleSample.class);
		// source: case-insensitive wildcard match on the organization name
		if (particleSource != null && particleSource.length() > 0) {
			TextMatchMode sourceMatchMode = new TextMatchMode(particleSource);
			crit.createAlias("source", "source",
					CriteriaSpecification.LEFT_JOIN).add(
					Restrictions.ilike("source.organizationName",
							sourceMatchMode.getUpdatedText(), sourceMatchMode
									.getMatchMode()));
		}
		// nanoparticle entity: the alias is also created when only function
		// criteria are present, because the function branch below joins
		// through nanoEntity.composingElementCollection
		if (nanoparticleEntityClassNames != null
				&& nanoparticleEntityClassNames.length > 0
				|| otherNanoparticleTypes != null
				&& otherNanoparticleTypes.length > 0
				|| functionClassNames != null && functionClassNames.length > 0
				|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
			crit.createAlias("sampleComposition.nanoparticleEntityCollection",
					"nanoEntity", CriteriaSpecification.LEFT_JOIN);
			Disjunction disjunction = Restrictions.disjunction();
			if (nanoparticleEntityClassNames != null
					&& nanoparticleEntityClassNames.length > 0) {
				Criterion nanoEntityCrit = Restrictions.in("nanoEntity.class",
						nanoparticleEntityClassNames);
				disjunction.add(nanoEntityCrit);
			}
			if (otherNanoparticleTypes != null
					&& otherNanoparticleTypes.length > 0) {
				// "other" entities match on their free-text type instead of class
				Criterion otherNanoCrit1 = Restrictions.eq("nanoEntity.class",
						"OtherNanoparticleEntity");
				Criterion otherNanoCrit2 = Restrictions.in("nanoEntity.type",
						otherNanoparticleTypes);
				Criterion otherNanoCrit = Restrictions.and(otherNanoCrit1,
						otherNanoCrit2);
				disjunction.add(otherNanoCrit);
			}
			crit.add(disjunction);
		}
		// function: matches either inherent functions (via composing elements)
		// or functions attached to functionalizing entities
		if (functionClassNames != null && functionClassNames.length > 0
				|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
			Disjunction disjunction = Restrictions.disjunction();
			crit.createAlias(
					"sampleComposition.functionalizingEntityCollection",
					"funcEntity", CriteriaSpecification.LEFT_JOIN);
			crit.createAlias("nanoEntity.composingElementCollection",
					"compElement", CriteriaSpecification.LEFT_JOIN)
					.createAlias("compElement.inherentFunctionCollection",
							"inFunc", CriteriaSpecification.LEFT_JOIN);
			crit.createAlias("funcEntity.functionCollection", "func",
					CriteriaSpecification.LEFT_JOIN);
			if (functionClassNames != null && functionClassNames.length > 0) {
				Criterion funcCrit1 = Restrictions.in("inFunc.class",
						functionClassNames);
				Criterion funcCrit2 = Restrictions.in("func.class",
						functionClassNames);
				disjunction.add(funcCrit1).add(funcCrit2);
			}
			if (otherFunctionTypes != null && otherFunctionTypes.length > 0) {
				Criterion otherFuncCrit1 = Restrictions.and(Restrictions.eq(
						"inFunc.class", "OtherFunctionType"), Restrictions.in(
						"inFunc.type", otherFunctionTypes));
				Criterion otherFuncCrit2 = Restrictions.and(Restrictions.eq(
						"func.class", "OtherFunctionType"), Restrictions.in(
						"func.type", otherFunctionTypes));
				disjunction.add(otherFuncCrit1).add(otherFuncCrit2);
			}
			crit.add(disjunction);
		}
		// characterization and free-text search (keywords + descriptions)
		if (characterizationClassNames != null
				&& characterizationClassNames.length > 0 || wordList != null
				&& wordList.length > 0) {
			crit.createAlias("characterizationCollection", "chara",
					CriteriaSpecification.LEFT_JOIN);
			if (characterizationClassNames != null
					&& characterizationClassNames.length > 0) {
				crit.add(Restrictions.in("chara.class",
						characterizationClassNames));
			}
			if (wordList != null && wordList.length > 0) {
				// turn words into upper case before searching keywords
				// (keyword names are presumably stored upper-case — TODO confirm)
				String[] upperKeywords = new String[wordList.length];
				for (int i = 0; i < wordList.length; i++) {
					upperKeywords[i] = wordList[i].toUpperCase();
				}
				Disjunction disjunction = Restrictions.disjunction();
				// sample-level keywords
				crit.createAlias("keywordCollection", "keyword1",
						CriteriaSpecification.LEFT_JOIN);
				for (String keyword : upperKeywords) {
					Criterion keywordCrit1 = Restrictions.like("keyword1.name",
							keyword, MatchMode.ANYWHERE);
					disjunction.add(keywordCrit1);
				}
				// keywords attached to characterization data files
				crit.createAlias("chara.derivedBioAssayDataCollection",
						"derived", CriteriaSpecification.LEFT_JOIN)
						.createAlias("derived.labFile", "charFile",
								CriteriaSpecification.LEFT_JOIN).createAlias(
								"charFile.keywordCollection", "keyword2",
								CriteriaSpecification.LEFT_JOIN);
				;
				for (String keyword : upperKeywords) {
					Criterion keywordCrit2 = Restrictions.like("keyword2.name",
							keyword, MatchMode.ANYWHERE);
					disjunction.add(keywordCrit2);
				}
				// descriptions keep the original (non-uppercased) words but
				// match case-insensitively via ilike
				for (String word : wordList) {
					Criterion summaryCrit1 = Restrictions.ilike(
							"chara.description", word, MatchMode.ANYWHERE);
					Criterion summaryCrit2 = Restrictions.ilike(
							"charFile.description", word, MatchMode.ANYWHERE);
					Criterion summaryCrit = Restrictions.or(summaryCrit1,
							summaryCrit2);
					disjunction.add(summaryCrit);
				}
				crit.add(disjunction);
			}
		}
		crit.setFetchMode("source", FetchMode.JOIN); // eager load not set in caDSR
		crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.nanoparticleEntityCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.nanoparticleEntityCollection.composingElementCollection",
						FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.nanoparticleEntityCollection.composingElementCollection.inherentFunctionCollection",
						FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.functionalizingEntityCollection.functionCollection",
						FetchMode.JOIN);
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		List results = appService.query(crit);
		for (Object obj : results) {
			NanoparticleSample particle = (NanoparticleSample) obj;
			particles.add(particle);
		}
		// filter by functionalizingEntities
		// can't use funcEntity.class in the where clause because
		// table-per-subclass is used for inheritance
		return filterByFunctionalizingEntities(functionalizingEntityClassNames,
				otherFunctionalizingEntityTypes, particles);
	}

	/**
	 * In-memory filter keeping particles whose stored functionalizing-entity
	 * names (or "other" types) intersect the requested ones. When no class
	 * names are requested the input list is returned unfiltered.
	 *
	 * NOTE(review): a particle matching both a class name AND an "other" type
	 * appears to be added twice (the two matching loops each add it) — confirm
	 * whether duplicates are acceptable to callers or should be deduplicated.
	 */
	private List<NanoparticleSample> filterByFunctionalizingEntities(
			String[] functionalizingEntityClassNames,
			String[] otherFunctionalizingEntityTypes,
			List<NanoparticleSample> particles) {
		List<NanoparticleSample> filtered = new ArrayList<NanoparticleSample>();
		if (functionalizingEntityClassNames != null
				&& functionalizingEntityClassNames.length > 0) {
			for (NanoparticleSample particle : particles) {
				SortedSet<String> storedEntities = getStoredFunctionalizingEntityClassNames(particle);
				for (String entity : functionalizingEntityClassNames) {
					// if at least one functionalizing entity type matches, keep
					// the particle
					if (storedEntities.contains(entity)) {
						filtered.add(particle);
						break;
					}
				}
				if (otherFunctionalizingEntityTypes != null) {
					for (String other : otherFunctionalizingEntityTypes) {
						// if at least one function type matches, keep the
						// particle
						if (storedEntities.contains(other)) {
							filtered.add(particle);
							break;
						}
					}
				}
			}
		} else {
			filtered = particles;
		}
		return filtered;
	}

	/**
	 * Return all stored functionalizing entity class names. In case of
	 * OtherFunctionalizingEntity, store the OtherFunctionalizingEntity type
	 * instead of the class name.
	 *
	 * @param particleSample sample whose composition is inspected
	 * @return sorted, de-duplicated names (empty set when composition is null)
	 */
	public SortedSet<String> getStoredFunctionalizingEntityClassNames(
			NanoparticleSample particleSample) {
		SortedSet<String> storedEntities = new TreeSet<String>();
		if (particleSample.getSampleComposition() != null
				&& particleSample.getSampleComposition()
						.getFunctionalizingEntityCollection() != null) {
			for (FunctionalizingEntity entity : particleSample
					.getSampleComposition()
					.getFunctionalizingEntityCollection()) {
				if (entity instanceof OtherFunctionalizingEntity) {
					storedEntities.add(((OtherFunctionalizingEntity) entity)
							.getType());
				} else {
					storedEntities.add(ClassUtils.getShortClassName(entity
							.getClass().getCanonicalName()));
				}
			}
		}
		return storedEntities;
	}

	/**
	 * Return all stored function class names, collected from both the
	 * inherent functions of composing elements and the functions of
	 * functionalizing entities. In case of OtherFunction, store the
	 * otherFunction type instead of the class name.
	 *
	 * @param particleSample sample whose composition is inspected
	 * @return sorted, de-duplicated names (empty set when composition is null)
	 */
	public SortedSet<String> getStoredFunctionClassNames(
			NanoparticleSample particleSample) {
		SortedSet<String> storedFunctions = new TreeSet<String>();
		if (particleSample.getSampleComposition() != null) {
			// inherent functions: sample -> nanoparticle entities ->
			// composing elements -> inherent functions
			if (particleSample.getSampleComposition()
					.getNanoparticleEntityCollection() != null) {
				for (NanoparticleEntity entity : particleSample
						.getSampleComposition()
						.getNanoparticleEntityCollection()) {
					if (entity.getComposingElementCollection() != null) {
						for (ComposingElement element : entity
								.getComposingElementCollection()) {
							if (element.getInherentFunctionCollection() != null) {
								for (Function function : element
										.getInherentFunctionCollection()) {
									if (function instanceof OtherFunction) {
										storedFunctions
												.add(((OtherFunction) function)
														.getType());
									} else {
										storedFunctions.add(ClassUtils
												.getShortClassName(function
														.getClass()
														.getCanonicalName()));
									}
								}
							}
						}
					}
				}
			}
			// attached functions: sample -> functionalizing entities -> functions
			if (particleSample.getSampleComposition()
					.getFunctionalizingEntityCollection() != null) {
				for (FunctionalizingEntity entity : particleSample
						.getSampleComposition()
						.getFunctionalizingEntityCollection()) {
					if (entity.getFunctionCollection() != null) {
						for (Function function : entity.getFunctionCollection()) {
							if (function instanceof OtherFunction) {
								storedFunctions.add(((OtherFunction) function)
										.getType());
							} else {
								storedFunctions.add(ClassUtils
										.getShortClassName(function.getClass()
												.getCanonicalName()));
							}
						}
					}
				}
			}
		}
		return storedFunctions;
	}

	/**
	 * Return all stored nanoparticle entity class names. In case of
	 * OtherNanoparticleEntity, store the otherNanoparticleEntity type
	 * instead of the class name.
	 *
	 * @param particleSample sample whose composition is inspected
	 * @return sorted, de-duplicated names (empty set when composition is null)
	 */
	public SortedSet<String> getStoredNanoparticleEntityClassNames(
			NanoparticleSample particleSample) {
		SortedSet<String> storedEntities = new TreeSet<String>();
		if (particleSample.getSampleComposition() != null
				&& particleSample.getSampleComposition()
						.getNanoparticleEntityCollection() != null) {
			for (NanoparticleEntity entity : particleSample
					.getSampleComposition().getNanoparticleEntityCollection()) {
				if (entity instanceof OtherNanoparticleEntity) {
					storedEntities.add(((OtherNanoparticleEntity) entity)
							.getType());
				} else {
					storedEntities.add(ClassUtils.getShortClassName(entity
							.getClass().getCanonicalName()));
				}
			}
		}
		return storedEntities;
	}

	/**
	 * Return the short class names of all characterizations stored on the
	 * given particle (empty set when the collection is null).
	 */
	public SortedSet<String> getStoredCharacterizationClassNames(
			NanoparticleSample particle) {
		SortedSet<String> storedChars = new TreeSet<String>();
		if (particle.getCharacterizationCollection() != null) {
			for (Characterization achar : particle
					.getCharacterizationCollection()) {
				storedChars.add(ClassUtils.getShortClassName(achar.getClass()
						.getCanonicalName()));
			}
		}
		return storedChars;
	}

	/**
	 * Load a single sample by database id with its major associations
	 * eagerly fetched; returns null when no row matches.
	 *
	 * @param particleId numeric id as a string (must parse as a Long)
	 */
	public NanoparticleSample findNanoparticleSampleById(String particleId)
			throws Exception {
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		DetachedCriteria crit = DetachedCriteria.forClass(
				NanoparticleSample.class).add(
				Property.forName("id").eq(new Long(particleId)));
		crit.setFetchMode("source", FetchMode.JOIN); // eager load not set in caDSR
		crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.nanoparticleEntityCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode("sampleComposition.labFileCollection",
						FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.chemicalAssociationCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.chemicalAssociationCollection.associatedElementA",
						FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.chemicalAssociationCollection.associatedElementB",
						FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
				FetchMode.JOIN);
		crit.setFetchMode("reportCollection", FetchMode.JOIN);
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		List result = appService.query(crit);
		NanoparticleSample particleSample = null;
		if (!result.isEmpty()) {
			particleSample = (NanoparticleSample) result.get(0);
		}
		return particleSample;
	}

	/**
	 * Load a single sample by its (presumably unique — TODO confirm) name
	 * with the same eager-fetch graph as findNanoparticleSampleById; returns
	 * null when no row matches.
	 */
	public NanoparticleSample findNanoparticleSampleByName(String particleName)
			throws Exception {
		NanoparticleSample particleSample = null;
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		DetachedCriteria crit = DetachedCriteria.forClass(
				NanoparticleSample.class).add(
				Property.forName("name").eq(particleName));
		crit.setFetchMode("source", FetchMode.JOIN); // eager load not set in caDSR
		crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.nanoparticleEntityCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode("sampleComposition.labFileCollection",
						FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.chemicalAssociationCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.chemicalAssociationCollection.associatedElementA",
						FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.chemicalAssociationCollection.associatedElementB",
						FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
				FetchMode.JOIN);
		crit.setFetchMode("reportCollection", FetchMode.JOIN);
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		List result = appService.query(crit);
		if (!result.isEmpty()) {
			particleSample = (NanoparticleSample) result.get(0);
		}
		return particleSample;
	}

	/**
	 * Load the derived bioassay data for a characterization, resolving each
	 * record's lab file and that file's keywords via separate queries.
	 *
	 * NOTE(review): the id is concatenated directly into the HQL string here
	 * and in the sibling finders — safe only while ids come from trusted
	 * internal sources; parameterized queries would be preferable.
	 */
	public List<DerivedBioAssayData> findDerivedBioAssayDataByCharId(
			String charId) throws Exception {
		List<DerivedBioAssayData> derivedBioAssayDataCollection = new ArrayList<DerivedBioAssayData>();
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		HQLCriteria crit = new HQLCriteria(
				"select achar.derivedBioAssayDataCollection from gov.nih.nci.cananolab.domain.particle.characterization.Characterization achar where achar.id = "
						+ charId);
		List results = appService.query(crit);
		FileServiceHelper fileHelper = new FileServiceHelper();
		for (Object obj : results) {
			DerivedBioAssayData derivedBioAssayData = (DerivedBioAssayData) obj;
			// derivedBioAssayData's labfile
			LabFile labFile = findDerivedBioAssayDataLabFile(derivedBioAssayData
					.getId().toString());
			// labFile's keyword
			List<Keyword> keywords = fileHelper.findKeywordsByFileId(labFile
					.getId().toString());
			labFile.setKeywordCollection(new HashSet<Keyword>(keywords));
			derivedBioAssayData.setLabFile(labFile);
			derivedBioAssayDataCollection.add(derivedBioAssayData);
		}
		return derivedBioAssayDataCollection;
	}

	/**
	 * Fetch the lab file of a derived bioassay data record; returns null if
	 * the query yields no rows (the loop keeps the last row otherwise).
	 */
	public LabFile findDerivedBioAssayDataLabFile(String derivedId)
			throws Exception {
		LabFile labFile = null;
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		HQLCriteria crit = new HQLCriteria(
				"select bioassay.labFile from gov.nih.nci.cananolab.domain.common.DerivedBioAssayData bioassay where bioassay.id = "
						+ derivedId);
		List results = appService.query(crit);
		for (Object obj : results) {
			labFile = (LabFile) obj;
		}
		return labFile;
	}

	/**
	 * List the keywords attached directly to a sample (empty list when the
	 * sample has none or does not exist).
	 */
	public List<Keyword> findKeywordsForNanoparticleSampleId(
			String particleSampleId) throws Exception {
		List<Keyword> keywords = new ArrayList<Keyword>();
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		HQLCriteria crit = new HQLCriteria(
				"select aParticle.keywordCollection from gov.nih.nci.cananolab.domain.particle.NanoparticleSample aParticle where aParticle.id = "
						+ particleSampleId);
		List results = appService.query(crit);
		for (Object obj : results) {
			Keyword keyword = (Keyword) obj;
			keywords.add(keyword);
		}
		return keywords;
	}

	/**
	 * Count samples visible to the public by intersecting all sample names
	 * with the service's public-data name list.
	 */
	public int getNumberOfPublicNanoparticleSamples() throws Exception {
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		List<String> publicData = appService.getPublicData();
		HQLCriteria crit = new HQLCriteria(
				"select name from gov.nih.nci.cananolab.domain.particle.NanoparticleSample");
		List results = appService.query(crit);
		List<String> publicNames = new ArrayList<String>();
		for (Object obj : results) {
			String name = (String) obj.toString();
			if (publicData.contains(name)) {
				publicNames.add(name);
			}
		}
		return publicNames.size();
	}

	/** Distinct characterization subclass names stored for a sample. */
	public String[] getCharacterizationClassNames(String particleId)
			throws Exception {
		String hql = "select distinct achar.class from gov.nih.nci.cananolab.domain.particle.characterization.Characterization achar"
				+ " where achar.nanoparticleSample.id = " + particleId;
		return this.getClassNames(hql);
	}

	/**
	 * Functionalizing-entity names for one sample, computed in memory from
	 * the loaded entity graph ("other" entities contribute their type).
	 */
	public String[] getFunctionalizingEntityClassNames(String particleId)
			throws Exception {
		SortedSet<String> names = new TreeSet<String>();
		DetachedCriteria crit = DetachedCriteria.forClass(
				NanoparticleSample.class).add(
				Property.forName("id").eq(new Long(particleId)));
		crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
				FetchMode.JOIN);
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		List results = appService.query(crit);
		for (Object obj : results) {
			NanoparticleSample particleSample = (NanoparticleSample) obj;
			names = this
					.getStoredFunctionalizingEntityClassNames(particleSample);
		}
		return names.toArray(new String[0]);
	}

	/**
	 * Function names for one sample, computed in memory from the eagerly
	 * loaded composition graph ("other" functions contribute their type).
	 */
	public String[] getFunctionClassNames(String particleId) throws Exception {
		SortedSet<String> names = new TreeSet<String>();
		DetachedCriteria crit = DetachedCriteria.forClass(
				NanoparticleSample.class).add(
				Property.forName("id").eq(new Long(particleId)));
		crit.setFetchMode("sampleComposition.nanoparticleEntityCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.nanoparticleEntityCollection.composingElementCollection",
						FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.nanoparticleEntityCollection.composingElementCollection.inherentFunctionCollection",
						FetchMode.JOIN);
		crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"sampleComposition.functionalizingEntityCollection.functionCollection",
						FetchMode.JOIN);
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		List results = appService.query(crit);
		for (Object obj : results) {
			NanoparticleSample particleSample = (NanoparticleSample) obj;
			names = this.getStoredFunctionClassNames(particleSample);
		}
		return names.toArray(new String[0]);
	}

	/**
	 * Nanoparticle-entity names for one sample: distinct subclass names,
	 * plus the free-text types of any OtherNanoparticleEntity rows.
	 */
	public String[] getNanoparticleEntityClassNames(String particleId)
			throws Exception {
		String hql = "select distinct entity.class from "
				+ " gov.nih.nci.cananolab.domain.particle.samplecomposition.base.NanoparticleEntity entity"
				+ " where entity.class!='OtherNanoparticleEntity' and entity.sampleComposition.nanoparticleSample.id = "
				+ particleId;
		String[] classNames = this.getClassNames(hql);
		SortedSet<String> names = new TreeSet<String>();
		if (classNames.length > 0) {
			names.addAll(Arrays.asList(classNames));
		}
		String hql2 = "select distinct entity.type from "
				+ " gov.nih.nci.cananolab.domain.particle.samplecomposition.base.OtherNanoparticleEntity entity"
				+ " where entity.sampleComposition.nanoparticleSample.id = "
				+ particleId;
		String[] otherTypes = this.getClassNames(hql2);
		if (otherTypes.length > 0) {
			names.addAll(Arrays.asList(otherTypes));
		}
		return names.toArray(new String[0]);
	}

	/**
	 * Run a single-column HQL query and return the results as strings
	 * (empty array when the service returns null).
	 */
	private String[] getClassNames(String hql) throws Exception {
		String[] classNames = null;
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		HQLCriteria crit = new HQLCriteria(hql);
		List results = appService.query(crit);
		if (results != null) {
			classNames = new String[results.size()];
		} else {
			classNames = new String[0];
		}
		int i = 0;
		for (Object obj : results) {
			classNames[i] = (String) obj.toString();
			i++;
		}
		return classNames;
	}

	/**
	 * Render each sample as one delimited row (id, name, source, entity /
	 * functionalizing-entity / function / characterization name lists) for
	 * display, using the caNanoLab view delimiters.
	 */
	public String[] getNanoparticleSampleViewStrs(
			List<NanoparticleSample> particleSamples) {
		List<String> particleStrings = new ArrayList<String>(particleSamples
				.size());
		for (NanoparticleSample particleSample : particleSamples) {
			List<String> columns = new ArrayList<String>();
			columns.add(particleSample.getId().toString());
			columns.add(particleSample.getName());
			columns.add(particleSample.getSource().getOrganizationName());
			columns.add(StringUtils.join(
					getStoredNanoparticleEntityClassNames(particleSample),
					CaNanoLabConstants.VIEW_CLASSNAME_DELIMITER));
			columns.add(StringUtils.join(
					getStoredFunctionalizingEntityClassNames(particleSample),
					CaNanoLabConstants.VIEW_CLASSNAME_DELIMITER));
			columns.add(StringUtils.join(
					getStoredFunctionClassNames(particleSample),
					CaNanoLabConstants.VIEW_CLASSNAME_DELIMITER));
			columns.add(StringUtils.join(
					getStoredCharacterizationClassNames(particleSample),
					CaNanoLabConstants.VIEW_CLASSNAME_DELIMITER));
			particleStrings.add(StringUtils.join(columns,
					CaNanoLabConstants.VIEW_COL_DELIMITER));
		}
		String[] particleStrArray = new String[particleStrings.size()];
		return particleStrings.toArray(particleStrArray);
	}
}
package com.google.step.YOUR_PROJECT_NAME_HERE.external;

import com.google.step.YOUR_PROJECT_NAME_HERE.data.Card;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.*;
import java.util.regex.*;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Builds a {@link Card} for a Stack Overflow question URL by calling the Stack
 * Exchange API: resolves the question title, finds the top-voted answer, and
 * extracts the answer's description and code snippets.
 */
public final class StackOverflowClient {

  private static final String SEARCH_URL_TEMPLATE =
      "https://api.stackexchange.com/2.2/questions/%s?"
          + "order=desc&sort=activity&site=stackoverflow";
  private static final String QUESTION_URL_TEMPLATE =
      "https://api.stackexchange.com/2.2/questions/%s/answers?"
          + "order=desc&sort=votes&site=stackoverflow";
  // This url specify filter to generate answer body.
  private static final String ANSWER_URL_TEMPLATE =
      "https://api.stackexchange.com/2.2/answers/%s?order"
          + "=desc&sort=activity&site=stackoverflow&filter=!9_bDE(fI5";
  // Position of the question id in a /questions/<id>/... URL path.
  private static final int ID_INDEX = 2;
  private static final String ITEM_PARAMETER = "items";
  private static final String TITLE_PARAMETER = "title";
  private static final String BODY_PARAMETER = "body";
  private static final String CODE_PARAMETER = "code";
  private static final String ANSWER_ID_PARAMETER = "answer_id";
  // Maximum number of characters kept from the answer's description.
  private static final int DESCRIPTION_LENGTH_PARAMETER = 200;

  /**
   * Builds a card for the given question URL.
   *
   * @param url a Stack Overflow question URL from the CSE result
   * @return the populated card, or null when no valid card is available
   */
  public Card search(String url) {
    try {
      Card card = getQuestion(url);
      if (card == null) {
        return null;
      }
      card = getAnswerId(card);
      card = getAnswer(card);
      return card;
    } catch (URISyntaxException e) {
      // Return null card if no valid card available.
      return null;
    }
  }

  /**
   * Gets the question id and title based on the URL from the CSE result.
   * Returns null when the URL's path does not contain a numeric question id.
   */
  private Card getQuestion(String url) throws URISyntaxException {
    Card card = new Card();
    URI uri = new URI(url);
    // Parse the URL to get the question id.
    String[] segments = uri.getPath().split("/");
    String questionId = segments[ID_INDEX];
    if (!Pattern.matches("[0-9]+", questionId)) {
      return null;
    }
    String searchUrl = String.format(SEARCH_URL_TEMPLATE, questionId);
    try {
      JSONObject res = getResponse(searchUrl);
      String title = res.getJSONArray(ITEM_PARAMETER).getJSONObject(0).get(TITLE_PARAMETER).toString();
      card.setLink(uri.toString());
      // Store the id of the question in order to get the code body of the answer.
      card.setCode(questionId);
      card.setTitle(title);
    } catch (IOException e) {
      e.printStackTrace();
    }
    return card;
  }

  /** Gets the most voted answer's id and stores it in the card. */
  private Card getAnswerId(Card card) {
    String questionUrl = String.format(QUESTION_URL_TEMPLATE, card.getCode());
    try {
      JSONObject res = getResponse(questionUrl);
      String answerId = res.getJSONArray(ITEM_PARAMETER).getJSONObject(0).get(ANSWER_ID_PARAMETER).toString();
      // Replace the question id by the answer id in order to retrieve the code body next.
      card.setCode(answerId);
    } catch (IOException e) {
      e.printStackTrace();
    }
    return card;
  }

  /** Gets the content of the answer and stores it in the card. */
  private Card getAnswer(Card card) {
    String answerUrl = String.format(ANSWER_URL_TEMPLATE, card.getCode());
    try {
      JSONObject res = getResponse(answerUrl);
      String body = res.getJSONArray(ITEM_PARAMETER).getJSONObject(0).get(BODY_PARAMETER).toString();
      Document doc = Jsoup.parse(body);
      // Combine all description in the answer body.
      Elements descriptionHtml = doc.select("p");
      String description = "";
      for (Element e : descriptionHtml) {
        description += e.outerHtml();
      }
      // Clamp instead of a bare substring: answers shorter than the limit
      // previously threw StringIndexOutOfBoundsException.
      description =
          description.substring(0, Math.min(description.length(), DESCRIPTION_LENGTH_PARAMETER));
      // Combine all code in the answer body.
      Elements codeHtml = doc.select(CODE_PARAMETER);
      String code = "";
      for (Element e : codeHtml) {
        code += e.outerHtml();
      }
      card.setDescription(description);
      card.setCode(code);
    } catch (IOException e) {
      e.printStackTrace();
    }
    return card;
  }

  /**
   * Performs a GET request and parses the body as JSON. Returns an empty
   * JSONObject on a non-200 status or a missing entity.
   *
   * <p>All HTTP resources are closed via try-with-resources (the previous
   * version leaked the client, response, and reader on every call).
   */
  private JSONObject getResponse(String url) throws IOException {
    try (CloseableHttpClient httpClient = HttpClients.createDefault();
        CloseableHttpResponse response = httpClient.execute(new HttpGet(url))) {
      if (response.getStatusLine().getStatusCode() != 200) {
        return new JSONObject();
      }
      HttpEntity entity = response.getEntity();
      if (entity == null) {
        return new JSONObject();
      }
      // The Stack Exchange API serves UTF-8 JSON; decode explicitly rather
      // than relying on the platform default charset.
      try (BufferedReader reader =
          new BufferedReader(
              new InputStreamReader(
                  entity.getContent(), java.nio.charset.StandardCharsets.UTF_8))) {
        StringBuilder responseBody = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
          responseBody.append(line);
        }
        return new JSONObject(responseBody.toString());
      }
    }
  }
}
package edu.kit.ipd.crowdcontrol.objectservice.crowdworking;

import edu.kit.ipd.crowdcontrol.objectservice.database.model.enums.TaskStatus;
import edu.kit.ipd.crowdcontrol.objectservice.database.model.tables.records.PlatformRecord;
import edu.kit.ipd.crowdcontrol.objectservice.database.model.tables.records.TaskRecord;
import edu.kit.ipd.crowdcontrol.objectservice.database.model.tables.records.WorkerRecord;
import edu.kit.ipd.crowdcontrol.objectservice.database.operations.PlatformOperations;
import edu.kit.ipd.crowdcontrol.objectservice.database.operations.TasksOperations;
import edu.kit.ipd.crowdcontrol.objectservice.database.operations.WorkerOperations;
import edu.kit.ipd.crowdcontrol.objectservice.proto.Experiment;
import edu.kit.ipd.crowdcontrol.objectservice.proto.Worker;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Central registry and facade for crowd-working platforms: resolves a platform
 * by name, falls back to internal worker identification / payment where a
 * platform lacks support, and publishes / unpublishes / updates experiment
 * tasks, mirroring their state into the task table.
 */
public class PlatformManager {
    private final Map<String, Platform> platforms;
    private final WorkerIdentification fallbackWorker;
    private final Payment fallbackPayment;
    private final TasksOperations tasksOps;
    private final WorkerOperations workerOps;

    /**
     * Create a new manager for platforms. The known platforms in the database
     * will be deleted and re-created from the given list.
     *
     * @param crowdPlatforms  The list of crowdplatforms to be managed by this manager,
     *                        will be used to set up the list of platforms in the database
     * @param fallbackWorker  handler which is called if a platform does not support
     *                        identifying a worker; for this case need_email is set on the
     *                        platform and the email entered by the worker is used instead
     * @param fallbackPayment handler which is called if a platform does not support payment
     * @param tasksOps        Used for the task operations on the database
     * @param platformOps     Used for the platform operations on the database
     * @param workerOps       Used for the worker operations on the database
     */
    public PlatformManager(List<Platform> crowdPlatforms, WorkerIdentification fallbackWorker,
                           Payment fallbackPayment, TasksOperations tasksOps,
                           PlatformOperations platformOps, WorkerOperations workerOps) {
        this.tasksOps = tasksOps;
        this.fallbackWorker = fallbackWorker;
        this.fallbackPayment = fallbackPayment;
        this.workerOps = workerOps;

        // Index the platforms by name for O(1) lookup.
        platforms = crowdPlatforms.stream()
                .collect(Collectors.toMap(Platform::getName, Function.identity()));

        // Rebuild the platform table from scratch so it mirrors the given list.
        platformOps.deleteAllPlatforms();
        platforms.forEach((s, platform) -> {
            PlatformRecord rec = new PlatformRecord();
            rec.setName(platform.getName());
            // Fixed: a redundant setNeedsEmail(false) used to precede this call
            // and was immediately overwritten.
            rec.setNeedsEmail(isNeedemail(platform));
            rec.setRenderCalibrations(platform.isCalibrationAllowed());
            platformOps.createPlatform(rec);
        });
    }

    /**
     * A platform needs an email address whenever it cannot handle payment or
     * worker identification itself, so the fallback mechanisms can do it.
     */
    private boolean isNeedemail(Platform platform) {
        boolean needemail = false;
        /* platform does not handle payment, email is needed for internal payment */
        if (!platform.getPayment().isPresent())
            needemail = true;
        /* if platform cannot identify worker, we need to do that with a email adress */
        if (!platform.getWorker().isPresent())
            needemail = true;
        return needemail;
    }

    /**
     * Returns if the given platform needs an email or not.
     *
     * @param name name of the platform
     * @return true if the platform needs an email, false if not
     * @throws IllegalArgumentException if no platform with that name exists
     */
    public boolean getNeedemail(String name) {
        return isNeedemail(
                getPlatform(name).orElseThrow(() -> new IllegalArgumentException("Platform not found"))
        );
    }

    /**
     * Will get you the instance of a platform interface of a platform; this
     * instance is the same for all calls.
     *
     * @param name The name of the instance to use
     * @return The optional crowd platform instance
     */
    public Optional<Platform> getPlatform(String name) {
        return Optional.ofNullable(platforms.get(name));
    }

    /**
     * Will return the Worker interface which should be used to identify workers
     * for the given platform, falling back to the internal identification.
     *
     * @param name The name of the platform
     * @return The interface used to identify a worker
     * @throws IllegalArgumentException if no platform with that name exists
     */
    public WorkerIdentification getWorker(String name) {
        return getPlatform(name)
                .orElseThrow(() -> new IllegalArgumentException("Platform not found"))
                .getWorker().orElse(fallbackWorker);
    }

    /**
     * Will return the payment service which should be used for the given
     * platform, falling back to the internal payment service.
     *
     * @param name The name of the platform to use
     * @return The interface used for payment
     * @throws IllegalArgumentException if no platform with that name exists
     */
    public Payment getPlatformPayment(String name) {
        return getPlatform(name)
                .orElseThrow(() -> new IllegalArgumentException("Platform not found"))
                .getPayment().orElse(fallbackPayment);
    }

    /**
     * Publish the given experiment on the platform. The method will create a
     * running task record and update it with the platform data once the
     * platform call completes (or mark it stopped on failure).
     *
     * @param name       The name of the platform
     * @param experiment The experiment to publish
     * @return a future completing with true on success
     * @throws TaskOperationException   if the task record could not be created
     * @throws IllegalArgumentException if no platform with that name exists
     */
    public CompletableFuture<Boolean> publishTask(String name, Experiment experiment) throws TaskOperationException {
        TaskRecord record = new TaskRecord();
        record.setExperiment(experiment.getId());
        record.setStatus(TaskStatus.running);
        record.setCrowdPlatform(name);

        TaskRecord result = tasksOps.createTask(record);
        if (result == null)
            throw new TaskOperationException("Task could not be created");

        return getPlatform(name)
                .map(platform1 -> platform1.publishTask(experiment))
                .orElseThrow(() -> new IllegalArgumentException("Platform not found!"))
                .handle((s1, throwable) -> {
                    if (s1 != null) {
                        // Platform accepted the task: remember its handle.
                        result.setPlatformData(s1);
                    } else {
                        // Publishing failed: mark the task as stopped.
                        result.setStatus(TaskStatus.stopped);
                    }
                    if (!tasksOps.updateTask(result)) {
                        throw new IllegalStateException("Updating record for published task failed");
                    }
                    return true;
                });
    }

    /**
     * Unpublish a given experiment from the given platform. Completes with
     * true immediately when no task record exists for the pair.
     *
     * @param name       The name of the platform
     * @param experiment The experiment to unpublish
     * @return a future completing with the success of the database update
     * @throws IllegalArgumentException if no platform with that name exists
     */
    public CompletableFuture<Boolean> unpublishTask(String name, Experiment experiment) throws TaskOperationException {
        TaskRecord record;

        record = tasksOps.getTask(name, experiment.getId()).orElse(null);
        if (record == null)
            return CompletableFuture.completedFuture(true);

        return getPlatform(name).map(platform -> platform.unpublishTask(record.getPlatformData()))
                .orElseThrow(() -> new IllegalArgumentException("Experiment not found!"))
                .thenApply(aBoolean -> {
                    record.setStatus(TaskStatus.finished);
                    return tasksOps.updateTask(record);
                });
    }

    /**
     * Update the given experiment on the given platform and persist the new
     * platform data.
     *
     * @param name       The name of the platform
     * @param experiment The experiment to update
     * @return a future completing with the success of the database update
     * @throws TaskOperationException   if the experiment is not published
     * @throws IllegalArgumentException if no platform with that name exists
     */
    public CompletableFuture<Boolean> updateTask(String name, Experiment experiment) throws TaskOperationException {
        TaskRecord record;

        record = tasksOps.getTask(name, experiment.getId()).
                orElseThrow(() -> new TaskOperationException("Experiment is not published"));

        return getPlatform(name)
                .map(platform -> platform.updateTask(record.getPlatformData(), experiment))
                .orElseThrow(() -> new IllegalArgumentException("Platform not found"))
                .thenApply(s -> {
                    record.setPlatformData(s);
                    return tasksOps.updateTask(record);
                });
    }

    /**
     * Parse a worker id out of the params which got passed by a platform.
     *
     * @param name   The name of the platform
     * @param params Params passed by the platform
     * @return the identified worker id
     * @throws UnidentifiedWorkerException if the user can not be found by the platform code
     */
    public String identifyWorker(String name, Map<String, String[]> params) throws UnidentifiedWorkerException {
        return getWorker(name).identifyWorker(params);
    }

    /**
     * Get a worker if he exists.
     *
     * @param name   Name of the platform
     * @param params Params passed by the platform
     * @return A worker if one is found
     * @throws UnidentifiedWorkerException if the platform does not identify a worker
     */
    public Optional<WorkerRecord> getWorker(String name, Map<String, String[]> params) throws UnidentifiedWorkerException {
        return getWorker(name).getWorker(workerOps, name, params);
    }

    /**
     * Pay a worker.
     *
     * @param name   The name of the platform
     * @param worker Worker to pay
     * @param amount The amount of money
     * @return A completable future which returns the success of the call
     */
    public CompletableFuture<Boolean> payWorker(String name, Worker worker, int amount) {
        return getPlatformPayment(name).payWorker(worker, amount);
    }
}
package org.cyclops.integrateddynamics.core.tileentity;

import com.google.common.base.Optional;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Delegate;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumFacing;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.property.IExtendedBlockState;
import org.cyclops.cyclopscore.block.property.ExtendedBlockStateBuilder;
import org.cyclops.cyclopscore.datastructure.EnumFacingMap;
import org.cyclops.cyclopscore.helper.MinecraftHelpers;
import org.cyclops.cyclopscore.persist.nbt.NBTPersist;
import org.cyclops.cyclopscore.tileentity.CyclopsTileEntity;
import org.cyclops.integrateddynamics.api.block.IFacadeable;
import org.cyclops.integrateddynamics.api.block.cable.ICableFakeable;
import org.cyclops.integrateddynamics.api.network.INetwork;
import org.cyclops.integrateddynamics.api.network.INetworkCarrier;
import org.cyclops.integrateddynamics.api.part.PartRenderPosition;
import org.cyclops.integrateddynamics.block.BlockCable;
import org.cyclops.integrateddynamics.capability.cable.CableConfig;
import org.cyclops.integrateddynamics.capability.cable.CableFakeableConfig;
import org.cyclops.integrateddynamics.capability.cable.CableFakeableMultipartTicking;
import org.cyclops.integrateddynamics.capability.cable.CableTileMultipartTicking;
import org.cyclops.integrateddynamics.capability.dynamiclight.DynamicLightConfig;
import org.cyclops.integrateddynamics.capability.dynamiclight.DynamicLightTileMultipartTicking;
import org.cyclops.integrateddynamics.capability.dynamicredstone.DynamicRedstoneConfig;
import org.cyclops.integrateddynamics.capability.dynamicredstone.DynamicRedstoneTileMultipartTicking;
import org.cyclops.integrateddynamics.capability.facadeable.FacadeableConfig;
import org.cyclops.integrateddynamics.capability.facadeable.FacadeableTileMultipartTicking;
import org.cyclops.integrateddynamics.capability.network.NetworkCarrierConfig;
import org.cyclops.integrateddynamics.capability.network.NetworkCarrierDefault;
import org.cyclops.integrateddynamics.capability.networkelementprovider.NetworkElementProviderConfig;
import org.cyclops.integrateddynamics.capability.networkelementprovider.NetworkElementProviderPartContainer;
import org.cyclops.integrateddynamics.capability.partcontainer.PartContainerConfig;
import org.cyclops.integrateddynamics.capability.partcontainer.PartContainerTileMultipartTicking;
import org.cyclops.integrateddynamics.capability.path.PathElementConfig;
import org.cyclops.integrateddynamics.capability.path.PathElementTile;
import org.cyclops.integrateddynamics.client.model.CableRenderState;
import org.cyclops.integrateddynamics.core.helper.PartHelpers;

import java.util.Objects;

/**
 * A ticking part entity which is made up of different parts.
 * @author Ruben Taelman
 */
public class TileMultipartTicking extends CyclopsTileEntity implements CyclopsTileEntity.ITickingTile,
        PartHelpers.IPartStateHolderCallback {

    // Ticking behaviour is delegated to this component (lombok @Delegate).
    @Delegate
    protected final ITickingTile tickingTileComponent = new TickingTileComponent(this);

    // Per-side cable connection state; persisted to NBT.
    @Getter
    @NBTPersist
    private EnumFacingMap<Boolean> connected = EnumFacingMap.newMap();
    // Per-side flags for connections that were explicitly disconnected; persisted.
    @NBTPersist
    private EnumFacingMap<Boolean> forceDisconnected = EnumFacingMap.newMap();
    // Per-side redstone output levels; persisted.
    @Getter
    @NBTPersist
    private EnumFacingMap<Integer> redstoneLevels = EnumFacingMap.newMap();
    // Per-side redstone input flags; persisted.
    @Getter
    @NBTPersist
    private EnumFacingMap<Boolean> redstoneInputs = EnumFacingMap.newMap();
    // Per-side light levels; persisted.
    @Getter
    @NBTPersist
    private EnumFacingMap<Integer> lightLevels = EnumFacingMap.newMap();
    // Snapshot of the last-seen light levels, used to detect changes on update packets.
    private EnumFacingMap<Integer> previousLightLevels;
    // Facade block name + metadata; persisted.
    @Getter
    @Setter
    @NBTPersist
    private String facadeBlockName = null;
    @Getter
    @Setter
    @NBTPersist
    private int facadeMeta = 0;

    @Getter
    private final PartContainerTileMultipartTicking partContainer;
    @Getter
    private final CableTileMultipartTicking cable;
    @Getter
    private final INetworkCarrier networkCarrier;
    @Getter
    private final ICableFakeable cableFakeable;

    // Lazily built render state; invalidated in onUpdateReceived().
    private IExtendedBlockState cachedState = null;

    public TileMultipartTicking() {
        // Register all capabilities this tile exposes.
        partContainer = new PartContainerTileMultipartTicking(this);
        addCapabilityInternal(PartContainerConfig.CAPABILITY, partContainer);
        addCapabilityInternal(NetworkElementProviderConfig.CAPABILITY, new NetworkElementProviderPartContainer(partContainer));
        addCapabilityInternal(FacadeableConfig.CAPABILITY, new FacadeableTileMultipartTicking(this));
        cable = new CableTileMultipartTicking(this);
        addCapabilityInternal(CableConfig.CAPABILITY, cable);
        networkCarrier = new NetworkCarrierDefault();
        addCapabilityInternal(NetworkCarrierConfig.CAPABILITY, networkCarrier);
        cableFakeable = new CableFakeableMultipartTicking(this);
        addCapabilityInternal(CableFakeableConfig.CAPABILITY, cableFakeable);
        addCapabilityInternal(PathElementConfig.CAPABILITY, new PathElementTile(this, cable));
        // Light and redstone capabilities are registered once per side.
        for (EnumFacing facing : EnumFacing.VALUES) {
            addCapabilitySided(DynamicLightConfig.CAPABILITY, facing, new DynamicLightTileMultipartTicking(this, facing));
            addCapabilitySided(DynamicRedstoneConfig.CAPABILITY, facing, new DynamicRedstoneTileMultipartTicking(this, facing));
        }
    }

    @Override
    public NBTTagCompound writeToNBT(NBTTagCompound tag) {
        // Mark dirty before writing so pending changes are not lost.
        this.markDirty();
        tag = super.writeToNBT(tag);
        tag.setTag("partContainer", partContainer.serializeNBT());
        tag.setBoolean("realCable", cableFakeable.isRealCable());
        return tag;
    }

    @Override
    public void readFromNBT(NBTTagCompound tag) {
        // Snapshot the pre-read state so we can detect visual changes afterwards.
        EnumFacingMap<Boolean> lastConnected = EnumFacingMap.newMap(connected);
        String lastFacadeBlockName = facadeBlockName;
        int lastFacadeMeta = facadeMeta;
        boolean lastRealCable = cableFakeable.isRealCable();
        if (tag.hasKey("parts", MinecraftHelpers.NBTTag_Types.NBTTagList.ordinal())
                && !tag.hasKey("partContainer", MinecraftHelpers.NBTTag_Types.NBTTagCompound.ordinal())) {
            // Backwards compatibility with old part saving.
            // TODO: remove in next major MC update.
            PartHelpers.readPartsFromNBT(getNetwork(), getPos(), tag, partContainer.getPartData(), getWorld());
        } else {
            partContainer.deserializeNBT(tag.getCompoundTag("partContainer"));
        }
        super.readFromNBT(tag);
        cableFakeable.setRealCable(tag.getBoolean("realCable"));
        // Request a render update only when something visible actually changed.
        if (getWorld() != null && (lastConnected == null || connected == null || !lastConnected.equals(connected)
                || !Objects.equals(lastFacadeBlockName, facadeBlockName) || lastFacadeMeta != facadeMeta
                || lastRealCable != cableFakeable.isRealCable())) {
            getWorld().markBlockRangeForRenderUpdate(getPos(), getPos());
        }
    }

    @Override
    public void onUpdateReceived() {
        // Recheck lighting only when the light levels actually changed.
        if(!lightLevels.equals(previousLightLevels)) {
            previousLightLevels = lightLevels;
            getWorld().checkLight(getPos());
        }
        // Invalidate the cached render state so it is rebuilt on next access.
        cachedState = null;
    }

    /**
     * Builds (or returns the cached) extended block state describing this cable's
     * render data: per-side connections, part render positions, facade and render state.
     */
    public IExtendedBlockState getConnectionState() {
        if (cachedState != null) {
            return cachedState;
        }
        ExtendedBlockStateBuilder builder = ExtendedBlockStateBuilder.builder((IExtendedBlockState) getBlock().getDefaultState());
        if (partContainer.getPartData() != null) { // Can be null in rare cases where rendering happens before data sync
            builder.withProperty(BlockCable.REALCABLE, cableFakeable.isRealCable());
            if (connected.isEmpty()) {
                getCable().updateConnections();
            }
            for (EnumFacing side : EnumFacing.VALUES) {
                builder.withProperty(BlockCable.CONNECTED[side.ordinal()],
                        !cable.isForceDisconnected(side) && connected.get(side));
                builder.withProperty(BlockCable.PART_RENDERPOSITIONS[side.ordinal()],
                        partContainer.hasPart(side) ? partContainer.getPart(side).getPartRenderPosition() : PartRenderPosition.NONE);
            }
            IFacadeable facadeable = getCapability(FacadeableConfig.CAPABILITY, null);
            // Guava Optional here (property type), not java.util.Optional.
            builder.withProperty(BlockCable.FACADE, facadeable.hasFacade() ? Optional.of(facadeable.getFacade()) : Optional.absent());
            builder.withProperty(BlockCable.PARTCONTAINER, partContainer);
            builder.withProperty(BlockCable.RENDERSTATE, new CableRenderState(
                    this.cableFakeable.isRealCable(),
                    EnumFacingMap.newMap(this.connected),
                    EnumFacingMap.newMap(this.partContainer.getPartData()),
                    facadeBlockName, facadeMeta
                    ));
        }
        return cachedState = builder.build();
    }

    @Override
    protected void updateTileEntity() {
        super.updateTileEntity();
        // Lazily (re)derive connections if they were reset.
        if (connected.isEmpty()) {
            cable.updateConnections();
        }
        partContainer.update();
    }

    /**
     * Sync and notify neighbours after a redstone change on the given side.
     */
    public void updateRedstoneInfo(EnumFacing side) {
        sendUpdate();
        getWorld().notifyNeighborsOfStateChange(getPos(), getBlockType());
        getWorld().notifyNeighborsOfStateChange(pos.offset(side.getOpposite()), getBlockType());
    }

    /**
     * Sync after a light-level change.
     */
    public void updateLightInfo() {
        sendUpdate();
    }

    public INetwork getNetwork() {
        return networkCarrier.getNetwork();
    }

    @Override
    public void onSet(PartHelpers.PartStateHolder<?, ?> partStateHolder) {
        // No-op: this tile does not react to part state holder changes.
    }

    /**
     * @return The raw force disconnection data.
     */
    public EnumFacingMap<Boolean> getForceDisconnected() {
        return this.forceDisconnected;
    }

    // Replaces the contents of the force-disconnected map in place.
    public void setForceDisconnected(EnumFacingMap<Boolean> forceDisconnected) {
        this.forceDisconnected.clear();
        this.forceDisconnected.putAll(forceDisconnected);
    }

    @Override
    public boolean canRenderBreaking() {
        return true;
    }

    @Override
    public boolean shouldRenderInPass(int pass) {
        return true;
    }

    @Override
    public boolean hasCapability(Capability<?> capability, EnumFacing facing) {
        // Fall back to the part container's capabilities when the tile itself has none.
        return super.hasCapability(capability, facing) || partContainer.hasCapability(capability, facing);
    }

    @Override
    public <T> T getCapability(Capability<T> capability, EnumFacing facing) {
        T value = super.getCapability(capability, facing);
        if (value != null) {
            return value;
        }
        return partContainer.getCapability(capability, facing);
    }
}
package org.mastodon.trackmate.ui.wizard.descriptors;

import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;

import java.util.List;
import java.util.Map;

import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;

import org.mastodon.linking.mamut.SpotLinkerOp;
import org.mastodon.revised.mamut.WindowManager;
import org.mastodon.trackmate.PluginProvider;
import org.mastodon.trackmate.Settings;
import org.mastodon.trackmate.TrackMate;
import org.mastodon.trackmate.ui.wizard.WizardController;
import org.mastodon.trackmate.ui.wizard.WizardPanelDescriptor;
import org.scijava.Context;
import org.scijava.Contextual;
import org.scijava.NullContextException;
import org.scijava.log.LogService;
import org.scijava.plugin.Parameter;

/**
 * Wizard page that lets the user pick one of the discovered spot-linker
 * implementations and registers the matching configuration panel as the
 * next wizard step.
 */
public class ChooseLinkerDescriptor extends WizardPanelDescriptor implements Contextual
{

	public static final String IDENTIFIER = "Linker selection";

	@Parameter
	private Context context;

	@Parameter
	private LogService log;

	// Discovers the descriptor (config panel) classes for each linker.
	private PluginProvider< SpotLinkerDescriptor > descriptorProvider;

	// Backs the combo-box in the panel below.
	private final DefaultComboBoxModel< String > model;

	// Visible names / descriptions / classes of the discovered linkers,
	// kept index-aligned by the PluginProvider.
	private List< String > names;

	private Map< String, String > descriptions;

	private List< Class< ? extends SpotLinkerOp > > classes;

	private final WizardController controller;

	private String nextDescriptorIdentifier = "Not null"; // FIXME

	private final TrackMate trackmate;

	private final WindowManager windowManager;

	// Last registered linker panel, to avoid re-registering the same one.
	private SpotLinkerDescriptor previousLinkerPanel = null;

	public ChooseLinkerDescriptor( final TrackMate trackmate, final WizardController controller, final WindowManager windowManager )
	{
		this.trackmate = trackmate;
		this.controller = controller;
		this.windowManager = windowManager;
		this.model = new DefaultComboBoxModel<>();
		this.targetPanel = new ChooseDetectorPanel();
		this.panelIdentifier = IDENTIFIER;
	}

	@Override
	public void setContext( final Context context )
	{
		context.inject( this );
		// Discover the available linker implementations.
		final PluginProvider< SpotLinkerOp > linkerProvider = new PluginProvider<>( SpotLinkerOp.class );
		context.inject( linkerProvider );
		this.names = linkerProvider.getVisibleNames();
		this.descriptions = linkerProvider.getDescriptions();
		this.classes = linkerProvider.getClasses();
		// Provider for the per-linker configuration panels.
		this.descriptorProvider = new PluginProvider<>( SpotLinkerDescriptor.class );
		context.inject( descriptorProvider );
	}

	@Override
	public void aboutToDisplayPanel()
	{
		// Pre-select the linker already present in the settings, if any.
		int indexOf = 0;
		final Settings settings = trackmate.getSettings();
		final Class< ? extends SpotLinkerOp > linkerClass = settings.values.getLinker();
		if ( null != linkerClass )
		{
			indexOf = classes.indexOf( linkerClass );
			/*
			 * FIX: was 'indexOf < -1', which can never be true since
			 * List.indexOf() returns -1 at minimum; an unknown linker class
			 * then crashed below in names.get( -1 ). Log and fall back to
			 * the first linker instead.
			 */
			if ( indexOf < 0 )
			{
				log.error( "Unkown linker class: " + linkerClass );
				indexOf = 0;
			}
		}
		model.removeAllElements();
		for ( final String name : names )
			model.addElement( name );
		model.setSelectedItem( names.get( indexOf ) );
	}

	@Override
	public void aboutToHidePanel()
	{
		// Store the selected linker class in the settings.
		final String name = ( String ) model.getSelectedItem();
		final Class< ? extends SpotLinkerOp > linkerClass = classes.get( names.indexOf( name ) );
		final Settings settings = trackmate.getSettings();
		settings.linker( linkerClass );

		/*
		 * Determine and register the next descriptor.
		 */
		final List< String > linkerPanelNames = descriptorProvider.getNames();
		for ( final String key : linkerPanelNames )
		{
			final SpotLinkerDescriptor linkerPanel = descriptorProvider.getInstance( key );
			if ( linkerPanel.getTargetClasses().contains( linkerClass ) )
			{
				// Same panel as last time: nothing to (re-)register.
				if ( linkerPanel == previousLinkerPanel )
					return;

				previousLinkerPanel = linkerPanel;
				if ( linkerPanel.getContext() == null )
					context().inject( linkerPanel );
				final Map< String, Object > defaultSettings = linkerPanel.getDefaultSettings();
				settings.linkerSettings( defaultSettings );
				linkerPanel.setTrackMate( trackmate );
				linkerPanel.setWindowManager( windowManager );
				linkerPanel.getPanelComponent().setSize( targetPanel.getSize() );
				controller.registerWizardPanel( linkerPanel );
				nextDescriptorIdentifier = linkerPanel.getPanelDescriptorIdentifier();
				return;
			}
		}
		throw new RuntimeException( "Could not find a descriptor that can configure " + linkerClass );
	}

	@Override
	public String getNextPanelDescriptorIdentifier()
	{
		return nextDescriptorIdentifier;
	}

	/**
	 * The Swing panel shown for this wizard step: a title, a combo-box of
	 * linkers and an info label showing the selected linker's description.
	 */
	private class ChooseDetectorPanel extends JPanel
	{

		private static final long serialVersionUID = 1L;

		public ChooseDetectorPanel()
		{
			final GridBagLayout layout = new GridBagLayout();
			layout.columnWidths = new int[] { 80, 80 };
			layout.columnWeights = new double[] { 0.5, 0.5 };
			layout.rowHeights = new int[] { 0, 0, 0, 26 };
			layout.rowWeights = new double[] { 0., 0., 0., 1.0 };
			setLayout( layout );

			final GridBagConstraints gbc = new GridBagConstraints();
			gbc.gridy = 0;
			gbc.gridx = 0;
			gbc.gridwidth = 2;
			gbc.anchor = GridBagConstraints.BASELINE_LEADING;
			gbc.fill = GridBagConstraints.HORIZONTAL;
			gbc.insets = new Insets( 5, 5, 5, 5 );

			final JLabel title = new JLabel( "Linker selection." );
			title.setFont( getFont().deriveFont( Font.BOLD ) );
			add( title, gbc );

			final JLabel lblPick = new JLabel( "Pick a spot linker:" );
			gbc.gridy = 1;
			gbc.anchor = GridBagConstraints.SOUTHWEST;
			add( lblPick, gbc );

			gbc.gridy = 2;
			gbc.gridx = 0;
			gbc.gridwidth = 2;
			gbc.fill = GridBagConstraints.HORIZONTAL;
			gbc.anchor = GridBagConstraints.NORTHWEST;
			gbc.insets = new Insets( 5, 5, 5, 5 );
			final JComboBox< String > comboBox = new JComboBox<>( model );
			add( comboBox, gbc );

			final JLabel lblInfo = new JLabel();
			lblInfo.setFont( getFont().deriveFont( getFont().getSize2D() - 2f ) );
			gbc.fill = GridBagConstraints.BOTH;
			gbc.gridy = 3;
			gbc.weighty = 1.;
			gbc.anchor = GridBagConstraints.EAST;
			add( lblInfo, gbc );

			// Keep the info label in sync with the selected linker.
			comboBox.addActionListener( ( e ) -> lblInfo.setText( descriptions.get( model.getSelectedItem() ) ) );
		}
	}

	// -- Contextual methods --

	@Override
	public Context context()
	{
		if ( context == null )
			throw new NullContextException();
		return context;
	}

	@Override
	public Context getContext()
	{
		return context;
	}
}
package org.xbib.elasticsearch.river.jdbc.strategy.simple; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.common.Base64; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.joda.time.DateTime; import org.elasticsearch.common.joda.time.format.DateTimeFormat; import org.elasticsearch.common.joda.time.format.DateTimeFormatter; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.xbib.elasticsearch.river.jdbc.RiverSource; import org.xbib.elasticsearch.river.jdbc.support.RiverContext; import org.xbib.elasticsearch.river.jdbc.support.SimpleValueListener; import org.xbib.elasticsearch.river.jdbc.support.ValueListener; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.math.BigDecimal; import java.security.NoSuchAlgorithmException; import java.sql.Array; import java.sql.Blob; import java.sql.CallableStatement; import java.sql.Clob; import java.sql.Connection; import java.sql.Date; import java.sql.DriverManager; import java.sql.NClob; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLNonTransientConnectionException; import java.sql.SQLXML; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.text.NumberFormat; import java.text.ParseException; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; public class SimpleRiverSource implements RiverSource { private final ESLogger logger = ESLoggerFactory.getLogger(SimpleRiverSource.class.getSimpleName()); private final Map<String, Object> lastRow = new HashMap<String, Object>(); protected RiverContext context; protected String url; 
protected String driver; protected String user; protected String password; protected Connection readConnection; protected Connection writeConnection; private int rounding; private int scale = -1; public SimpleRiverSource() { } protected ESLogger logger() { return logger; } @Override public String strategy() { return "simple"; } @Override public SimpleRiverSource riverContext(RiverContext context) { this.context = context; return this; } @Override public SimpleRiverSource driver(String driver) { this.driver = driver; try { // TODO: do we need this? older drivers? Class.forName(driver); } catch (ClassNotFoundException ex) { logger().error(ex.getMessage(), ex); } return this; } public String driver() { return driver; } @Override public SimpleRiverSource url(String url) { this.url = url; return this; } public String url() { return url; } @Override public SimpleRiverSource user(String user) { this.user = user; return this; } @Override public SimpleRiverSource password(String password) { this.password = password; return this; } /** * Get JDBC connection for reading * * @return the connection * @throws SQLException */ @Override public Connection connectionForReading() throws SQLException { boolean cond = readConnection == null || readConnection.isClosed(); try { cond = cond || !readConnection.isValid(5); } catch (AbstractMethodError e) { // old/buggy JDBC driver logger().debug(e.getMessage()); } catch (SQLFeatureNotSupportedException e) { // postgresql does not support isValid() logger().debug(e.getMessage()); } if (cond) { int retries = context != null ? 
context.retries() : 1; while (retries > 0) { retries try { readConnection = DriverManager.getConnection(url, user, password); // required by MySQL for large result streaming readConnection.setReadOnly(true); // Postgresql cursor mode condition: // fetchsize > 0, no scrollable result set, no auto commit, no holdable cursors over commit // https://github.com/pgjdbc/pgjdbc/blob/master/org/postgresql/jdbc2/AbstractJdbc2Statement.java#L514 //readConnection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); if (context != null) { // many drivers don't like autocommit=true readConnection.setAutoCommit(context.autocommit()); } return readConnection; } catch (SQLException e) { logger().error("while opening read connection: " + url + " " + e.getMessage(), e); try { Thread.sleep(context != null ? context.maxRetryWait().millis() : 1000L); } catch (InterruptedException ex) { // do nothing } } } } return readConnection; } /** * Get JDBC connection for writing * * @return the connection * @throws SQLException */ @Override public Connection connectionForWriting() throws SQLException { boolean cond = writeConnection == null || writeConnection.isClosed(); try { cond = cond || !writeConnection.isValid(5); } catch (AbstractMethodError e) { // old JDBC driver } catch (SQLFeatureNotSupportedException e) { // postgresql does not support isValid() } if (cond) { int retries = context != null ? context.retries() : 1; while (retries > 0) { retries try { writeConnection = DriverManager.getConnection(url, user, password); if (context != null) { // many drivers don't like autocommit=true writeConnection.setAutoCommit(context.autocommit()); } return writeConnection; } catch (SQLNonTransientConnectionException e) { // ignore derby drop=true } catch (SQLException e) { logger().error("while opening write connection: " + url + " " + e.getMessage(), e); try { Thread.sleep(context != null ? 
context.maxRetryWait().millis() : 1000L); } catch (InterruptedException ex) { // do nothing } } } } return writeConnection; } @Override public String fetch() throws SQLException, IOException { String mergeDigest = null; if (context.pollStatementParams().isEmpty()) { Statement statement = null; ResultSet results = null; try { // Postgresql: do not use prepareStatement. // Postgresql requires direct use of executeQuery(sql) for cursor with fetchsize statement = connectionForReading().createStatement(); results = executeQuery(statement, getSql()); ValueListener listener = new SimpleValueListener() .target(context.riverMouth()) .digest(context.digesting()); mergeDigest = merge(results, listener); } catch (Exception e) { throw new IOException(e); } finally { close(results); close(statement); acknowledge(); closeReading(); closeWriting(); } } else if (context.callable()) { // call stored procedure CallableStatement statement = null; ResultSet results = null; try { statement = connectionForReading().prepareCall(getSql()); bind(statement, context.pollStatementParams()); results = executeQuery(statement); ValueListener listener = new SimpleValueListener() .target(context.riverMouth()) .digest(context.digesting()); mergeDigest = merge(results, listener); } catch (Exception e) { throw new IOException(e); } finally { close(results); close(statement); acknowledge(); closeReading(); closeWriting(); } } else { PreparedStatement statement = null; ResultSet results = null; try { statement = prepareQuery(getSql()); bind(statement, context.pollStatementParams()); results = executeQuery(statement); ValueListener listener = new SimpleValueListener() .target(context.riverMouth()) .digest(context.digesting()); mergeDigest = merge(results, listener); } catch (Exception e) { throw new IOException(e); } finally { close(results); close(statement); acknowledge(); closeReading(); closeWriting(); } } return mergeDigest; } /** * Merge rows. 
* * @param results the ResultSet * @param listener * @return a digest of the merged row content * @throws IOException * @throws java.security.NoSuchAlgorithmException * @throws SQLException */ public String merge(ResultSet results, ValueListener listener) throws SQLException, IOException, ParseException, NoSuchAlgorithmException { long rows = 0L; beforeFirstRow(results, listener); while (nextRow(results, listener)) { rows++; } if (rows > 0) { logger().info("merged {} rows", rows); } else { logger().info("no rows to merge"); } listener.reset(); return context.digesting() && listener.digest() != null ? Base64.encodeBytes(listener.digest().digest()) : null; } /** * Send acknowledge SQL command if exists. * * @throws SQLException */ public void acknowledge() throws SQLException { // send acknowledge statement if defined if (context.pollAckStatement() != null) { Connection connection = connectionForWriting(); PreparedStatement statement = prepareUpdate(context.pollAckStatement()); if (context.pollAckStatementParams() != null) { bind(statement, context.pollAckStatementParams()); } statement.execute(); close(statement); try { if (!connection.getAutoCommit()) { connection.commit(); } } catch (SQLException e) { // Can't call commit when autocommit=true } closeWriting(); } } private String getSql() throws IOException { String sql = context.pollStatement(); if (sql.endsWith(".sql")) { Reader r = new InputStreamReader(new FileInputStream(sql), "UTF-8"); sql = Streams.copyToString(r); r.close(); } return sql; } /** * Prepare a query statement * * @param sql * @return a prepared statement * @throws SQLException */ @Override public PreparedStatement prepareQuery(String sql) throws SQLException { Connection connection = connectionForReading(); if (connection == null) { throw new SQLException("can't connect to source " + url); } logger().debug("preparing statement with SQL {}", sql); return connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } 
    /**
     * Prepare an update statement on the write connection. A value ending in
     * ".sql" is treated as a file name and its content is loaded as the SQL text.
     *
     * @param sql the SQL text or a ".sql" file name
     * @return a prepared statement
     * @throws SQLException also when the ".sql" file cannot be read
     */
    @Override
    public PreparedStatement prepareUpdate(String sql) throws SQLException {
        if (sql.endsWith(".sql")) {
            try {
                Reader r = new InputStreamReader(new FileInputStream(sql), "UTF-8");
                sql = Streams.copyToString(r);
                r.close();
            } catch (IOException e) {
                throw new SQLException("file not found: " + sql);
            }
        }
        Connection connection = connectionForWriting();
        if (connection == null) {
            throw new SQLException("can't connect to source " + url);
        }
        return connection.prepareStatement(sql);
    }

    /**
     * Bind values to prepared statement, in order (JDBC parameters are 1-based).
     *
     * @param pstmt the prepared statement
     * @param values the values to bind; null is tolerated with a warning
     * @throws SQLException
     */
    @Override
    public SimpleRiverSource bind(PreparedStatement pstmt, List<? extends Object> values) throws SQLException {
        if (values == null) {
            logger().warn("no values given for bind");
            return this;
        }
        for (int i = 1; i <= values.size(); i++) {
            bind(pstmt, i, values.get(i - 1));
        }
        return this;
    }

    /**
     * Execute prepared query statement, applying the context's max-rows and
     * fetch-size limits.
     *
     * @param statement the prepared statement
     * @return the result set
     * @throws SQLException
     */
    @Override
    public ResultSet executeQuery(PreparedStatement statement) throws SQLException {
        statement.setMaxRows(context.maxRows());
        statement.setFetchSize(context.fetchSize());
        logger().debug("executing prepared statement");
        return statement.executeQuery();
    }

    /**
     * Execute query statement directly (needed e.g. for Postgresql cursor
     * streaming), applying the context's max-rows and fetch-size limits.
     *
     * @param statement the statement
     * @param sql the SQL to run
     * @return the result set
     * @throws SQLException
     */
    @Override
    public ResultSet executeQuery(Statement statement, String sql) throws SQLException {
        statement.setMaxRows(context.maxRows());
        statement.setFetchSize(context.fetchSize());
        logger().debug("executing SQL {}", sql);
        return statement.executeQuery(sql);
    }

    /**
     * Execute prepared update statement and commit when autocommit is off.
     *
     * @param statement the prepared statement
     * @return this river source
     * @throws SQLException
     */
    @Override
    public RiverSource executeUpdate(PreparedStatement statement) throws SQLException {
        statement.executeUpdate();
        if (!writeConnection.getAutoCommit()) {
            writeConnection.commit();
        }
        return this;
    }

    /**
     * Announces the column labels of the result set to the listener before
     * the first row is processed.
     */
    public void beforeFirstRow(ResultSet result, ValueListener listener)
            throws SQLException, IOException, ParseException {
        ResultSetMetaData metadata = result.getMetaData();
        int columns = metadata.getColumnCount();
        List<String> keys = new LinkedList();
        for (int i = 1; i <= columns; i++) {
            keys.add(metadata.getColumnLabel(i));
        }
        if (listener != null) {
            listener.keys(keys);
        }
    }

    /**
     * Get next row and prepare the values for processing. The labels of each
     * columns are used for the ValueListener as paths for JSON object merging.
     *
     * @param result the result set
     * @param listener the listener
     * @return true if row exists and was processed, false otherwise
     * @throws SQLException
     * @throws IOException
     */
    @Override
    public boolean nextRow(ResultSet result, ValueListener listener)
            throws SQLException, IOException, ParseException {
        if (result.next()) {
            processRow(result, listener);
            return true;
        }
        return false;
    }

    /**
     * Converts the current row's columns via parseType() and forwards the
     * values to the listener; also records the raw column objects in lastRow
     * under "$row.<label>" keys.
     */
    private void processRow(ResultSet result, ValueListener listener)
            throws SQLException, IOException, ParseException {
        // Use the context locale when available, else the platform default.
        Locale locale = context != null ? context.locale() != null ? context.locale() : Locale.getDefault() : Locale.getDefault();
        List<Object> values = new LinkedList();
        ResultSetMetaData metadata = result.getMetaData();
        int columns = metadata.getColumnCount();
        lastRow.clear();
        for (int i = 1; i <= columns; i++) {
            Object value = parseType(result, i, metadata.getColumnType(i), locale);
            if (logger().isTraceEnabled()) {
                logger().trace("value={} class={}", value, value != null ? value.getClass().getName() : "");
            }
            values.add(value);
            lastRow.put("$row." + metadata.getColumnLabel(i), result.getObject(i));
        }
        if (listener != null) {
            listener.values(values);
        }
    }

    /**
     * Close result set.
     *
     * @param result
     * @throws SQLException
     */
    @Override
    public SimpleRiverSource close(ResultSet result) throws SQLException {
        if (result != null) {
            result.close();
        }
        return this;
    }

    /**
     * Close statement.
     *
     * @param statement
     * @throws SQLException
     */
    @Override
    public SimpleRiverSource close(Statement statement) throws SQLException {
        if (statement != null) {
            statement.close();
        }
        return this;
    }

    /**
     * Close read connection. Commits first (when autocommit is off) to finish
     * cursors/transactions; failures are logged, not thrown.
     */
    @Override
    public SimpleRiverSource closeReading() {
        try {
            if (readConnection != null) {
                // always commit before close to finish cursors/transactions
                if (!readConnection.getAutoCommit()) {
                    readConnection.commit();
                }
                if (!readConnection.isClosed()) {
                    readConnection.close();
                }
            }
        } catch (SQLException e) {
            logger().warn("while closing read connection: " + e.getMessage());
        }
        return this;
    }

    /**
     * Close write connection. Commits first (when autocommit is off) to finish
     * cursors/transactions; failures are logged, not thrown.
     */
    @Override
    public SimpleRiverSource closeWriting() {
        try {
            if (writeConnection != null) {
                // always commit before close to finish cursors/transactions
                if (!writeConnection.getAutoCommit()) {
                    writeConnection.commit();
                }
                if (!writeConnection.isClosed()) {
                    writeConnection.close();
                }
            }
        } catch (SQLException e) {
            logger().warn("while closing write connection: " + e.getMessage());
        }
        return this;
    }

    @Override
    public SimpleRiverSource acknowledge(BulkResponse response) throws IOException {
        // no, we do not acknowledge bulk in this strategy
        return this;
    }

    /**
     * Maps a rounding-mode name to the corresponding BigDecimal constant;
     * unknown names leave the current rounding unchanged.
     */
    @Override
    public SimpleRiverSource rounding(String rounding) {
        if ("ceiling".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_CEILING;
        } else if ("down".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_DOWN;
        } else if ("floor".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_FLOOR;
        } else if ("halfdown".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_HALF_DOWN;
        } else if ("halfeven".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_HALF_EVEN;
        } else if ("halfup".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_HALF_UP;
        } else if ("unnecessary".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_UNNECESSARY;
        } else if ("up".equalsIgnoreCase(rounding)) {
            this.rounding = BigDecimal.ROUND_UP;
        }
        return this;
    }

    // Sets the decimal scale used for numeric conversion (-1 = unset).
    @Override
    public SimpleRiverSource precision(int scale) {
        this.scale = scale;
        return this;
    }

    private static final String ISO_FORMAT_SECONDS = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";

    private static final String FORMAT_SECONDS = "yyyy-MM-dd HH:mm:ss";

    private final static DateTimeFormatter df = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    public static String formatNow() {
        return formatDateISO(new java.util.Date());
    }

    public static String formatDateISO(long millis) {
        return new DateTime(millis).toString(ISO_FORMAT_SECONDS);
    }

    public static String formatDateStandard(java.util.Date date) {
        if (date == null) {
            return null;
        }
        return new DateTime(date).toString(FORMAT_SECONDS);
    }

    public synchronized static String formatDateISO(java.util.Date date) {
        if (date == null) {
            return null;
        }
        return new DateTime(date).toString(ISO_FORMAT_SECONDS);
    }

    /**
     * Parses an ISO timestamp, falling back to a plain "yyyy-MM-dd" date;
     * returns null when neither format matches.
     */
    public synchronized static java.util.Date parseDateISO(String value) {
        if (value == null) {
            return null;
        }
        try {
            return df.parseDateTime(value).toDate();
        } catch (Exception e) {
            // ignore, try the date-only format below
        }
        try {
            return DateTimeFormat.forPattern("yyyy-MM-dd").parseDateTime(value).toDate();
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp, falling back to a plain
     * "yyyy-MM-dd" date; returns null when neither format matches.
     */
    public synchronized static java.util.Date parseDate(String value) {
        if (value == null) {
            return null;
        }
        try {
            return DateTimeFormat.forPattern(FORMAT_SECONDS).parseDateTime(value).toDate();
        } catch (Exception e) {
            // ignore, try the date-only format below
        }
        try {
            return DateTimeFormat.forPattern("yyyy-MM-dd").parseDateTime(value).toDate();
        } catch (Exception e) {
            return null;
        }
    }

    // NOTE: this method continues beyond the visible chunk; kept verbatim.
    private void bind(PreparedStatement pstmt, int i, Object value) throws SQLException { if (value == null)
{ pstmt.setNull(i, Types.VARCHAR); } else if (value instanceof String) { String s = (String) value; if ("$now".equals(s)) { pstmt.setDate(i, new Date(new java.util.Date().getTime())); } else if ("$job".equals(s)) { logger().debug("job = {}", context.job()); pstmt.setString(i, context.job()); } else { Object rowValue = lastRow.get(s); if (rowValue != null) { logger().debug("{} = {}", s, rowValue); pstmt.setObject(i, rowValue); } else { pstmt.setString(i, (String) value); } } } else if (value instanceof Integer) { pstmt.setInt(i, (Integer) value); } else if (value instanceof Long) { pstmt.setLong(i, (Long) value); } else if (value instanceof BigDecimal) { pstmt.setBigDecimal(i, (BigDecimal) value); } else if (value instanceof Date) { pstmt.setDate(i, (Date) value); } else if (value instanceof Timestamp) { pstmt.setTimestamp(i, (Timestamp) value); } else if (value instanceof Float) { pstmt.setFloat(i, (Float) value); } else if (value instanceof Double) { pstmt.setDouble(i, (Double) value); } else { pstmt.setObject(i, value); } } /** * Parse of value of resultset with the good type * * @param result * @param i * @param type * @param locale * @return The parse value * @throws SQLException * @throws IOException */ @Override public Object parseType(ResultSet result, Integer i, int type, Locale locale) throws SQLException, IOException, ParseException { if (logger().isTraceEnabled()) { logger().trace("{} {} {}", i, type, result.getString(i)); } switch (type) { /** * The JDBC types CHAR, VARCHAR, and LONGVARCHAR are closely * related. CHAR represents a small, fixed-length character string, * VARCHAR represents a small, variable-length character string, and * LONGVARCHAR represents a large, variable-length character string. 
*/ case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: { return result.getString(i); } case Types.NCHAR: case Types.NVARCHAR: case Types.LONGNVARCHAR: { return result.getNString(i); } /** * The JDBC types BINARY, VARBINARY, and LONGVARBINARY are closely * related. BINARY represents a small, fixed-length binary value, * VARBINARY represents a small, variable-length binary value, and * LONGVARBINARY represents a large, variable-length binary value */ case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: { return result.getBytes(i); } /** * The JDBC type ARRAY represents the SQL3 type ARRAY. * * An ARRAY value is mapped to an instance of the Array interface in * the Java programming language. If a driver follows the standard * implementation, an Array object logically points to an ARRAY * value on the server rather than containing the elements of the * ARRAY object, which can greatly increase efficiency. The Array * interface contains methods for materializing the elements of the * ARRAY object on the client in the form of either an array or a * ResultSet object. */ case Types.ARRAY: { Array a = result.getArray(i); return a != null ? a.toString() : null; } /** * The JDBC type BIGINT represents a 64-bit signed integer value * between -9223372036854775808 and 9223372036854775807. * * The corresponding SQL type BIGINT is a nonstandard extension to * SQL. In practice the SQL BIGINT type is not yet currently * implemented by any of the major databases, and we recommend that * its use be avoided in code that is intended to be portable. * * The recommended Java mapping for the BIGINT type is as a Java * long. */ case Types.BIGINT: { Object o = result.getLong(i); return result.wasNull() ? null : o; } /** * The JDBC type BIT represents a single bit value that can be zero * or one. * * SQL-92 defines an SQL BIT type. However, unlike the JDBC BIT * type, this SQL-92 BIT type can be used as a parameterized type to * define a fixed-length binary string. 
Fortunately, SQL-92 also * permits the use of the simple non-parameterized BIT type to * represent a single binary digit, and this usage corresponds to * the JDBC BIT type. Unfortunately, the SQL-92 BIT type is only * required in "full" SQL-92 and is currently supported by only a * subset of the major databases. Portable code may therefore prefer * to use the JDBC SMALLINT type, which is widely supported. */ case Types.BIT: { try { Object o = result.getInt(i); return result.wasNull() ? null : o; } catch (Exception e) { // PSQLException: Bad value for type int : t if (e.getMessage().startsWith("Bad value for type int")) { return "t".equals(result.getString(i)); } throw new IOException(e); } } /** * The JDBC type BOOLEAN, which is new in the JDBC 3.0 API, maps to * a boolean in the Java programming language. It provides a * representation of true and false, and therefore is a better match * than the JDBC type BIT, which is either 1 or 0. */ case Types.BOOLEAN: { return result.getBoolean(i); } /** * The JDBC type BLOB represents an SQL3 BLOB (Binary Large Object). * * A JDBC BLOB value is mapped to an instance of the Blob interface * in the Java programming language. If a driver follows the * standard implementation, a Blob object logically points to the * BLOB value on the server rather than containing its binary data, * greatly improving efficiency. The Blob interface provides methods * for materializing the BLOB data on the client when that is * desired. */ case Types.BLOB: { Blob blob = result.getBlob(i); if (blob != null) { long n = blob.length(); if (n > Integer.MAX_VALUE) { throw new IOException("can't process blob larger than Integer.MAX_VALUE"); } byte[] tab = blob.getBytes(1, (int) n); blob.free(); return tab; } break; } /** * The JDBC type CLOB represents the SQL3 type CLOB (Character Large * Object). * * A JDBC CLOB value is mapped to an instance of the Clob interface * in the Java programming language. 
If a driver follows the * standard implementation, a Clob object logically points to the * CLOB value on the server rather than containing its character * data, greatly improving efficiency. Two of the methods on the * Clob interface materialize the data of a CLOB object on the * client. */ case Types.CLOB: { Clob clob = result.getClob(i); if (clob != null) { long n = clob.length(); if (n > Integer.MAX_VALUE) { throw new IOException("can't process clob larger than Integer.MAX_VALUE"); } String str = clob.getSubString(1, (int) n); clob.free(); return str; } break; } case Types.NCLOB: { NClob nclob = result.getNClob(i); if (nclob != null) { long n = nclob.length(); if (n > Integer.MAX_VALUE) { throw new IOException("can't process nclob larger than Integer.MAX_VALUE"); } String str = nclob.getSubString(1, (int) n); nclob.free(); return str; } break; } /** * The JDBC type DATALINK, new in the JDBC 3.0 API, is a column * value that references a file that is outside of a data source but * is managed by the data source. It maps to the Java type * java.net.URL and provides a way to manage external files. For * instance, if the data source is a DBMS, the concurrency controls * it enforces on its own data can be applied to the external file * as well. * * A DATALINK value is retrieved from a ResultSet object with the * ResultSet methods getURL or getObject. If the Java platform does * not support the type of URL returned by getURL or getObject, a * DATALINK value can be retrieved as a String object with the * method getString. * * java.net.URL values are stored in a database using the method * setURL. If the Java platform does not support the type of URL * being set, the method setString can be used instead. * * */ case Types.DATALINK: { return result.getURL(i); } /** * The JDBC DATE type represents a date consisting of day, month, * and year. The corresponding SQL DATE type is defined in SQL-92, * but it is implemented by only a subset of the major databases. 
* Some databases offer alternative SQL types that support similar * semantics. */ case Types.DATE: { try { Date d = result.getDate(i); return d != null ? formatDateISO(d.getTime()) : null; } catch (SQLException e) { return null; } } case Types.TIME: { try { Time t = result.getTime(i); return t != null ? formatDateISO(t.getTime()) : null; } catch (SQLException e) { return null; } } case Types.TIMESTAMP: { try { Timestamp t = result.getTimestamp(i); return t != null ? formatDateISO(t.getTime()) : null; } catch (SQLException e) { // java.sql.SQLException: Cannot convert value '0000-00-00 00:00:00' from column ... to TIMESTAMP. return null; } } /** * The JDBC types DECIMAL and NUMERIC are very similar. They both * represent fixed-precision decimal values. * * The corresponding SQL types DECIMAL and NUMERIC are defined in * SQL-92 and are very widely implemented. These SQL types take * precision and scale parameters. The precision is the total number * of decimal digits supported, and the scale is the number of * decimal digits after the decimal point. For most DBMSs, the scale * is less than or equal to the precision. So for example, the value * "12.345" has a precision of 5 and a scale of 3, and the value * ".11" has a precision of 2 and a scale of 2. JDBC requires that * all DECIMAL and NUMERIC types support both a precision and a * scale of at least 15. * * The sole distinction between DECIMAL and NUMERIC is that the * SQL-92 specification requires that NUMERIC types be represented * with exactly the specified precision, whereas for DECIMAL types, * it allows an implementation to add additional precision beyond * that specified when the type was created. Thus a column created * with type NUMERIC(12,4) will always be represented with exactly * 12 digits, whereas a column created with type DECIMAL(12,4) might * be represented by some larger number of digits. * * The recommended Java mapping for the DECIMAL and NUMERIC types is * java.math.BigDecimal. 
The java.math.BigDecimal type provides math * operations to allow BigDecimal types to be added, subtracted, * multiplied, and divided with other BigDecimal types, with integer * types, and with floating point types. * * The method recommended for retrieving DECIMAL and NUMERIC values * is ResultSet.getBigDecimal. JDBC also allows access to these SQL * types as simple Strings or arrays of char. Thus, Java programmers * can use getString to receive a DECIMAL or NUMERIC result. * However, this makes the common case where DECIMAL or NUMERIC are * used for currency values rather awkward, since it means that * application writers have to perform math on strings. It is also * possible to retrieve these SQL types as any of the Java numeric * types. */ case Types.DECIMAL: case Types.NUMERIC: { BigDecimal bd = null; try { bd = result.getBigDecimal(i); } catch (NullPointerException e) { // getBigDecimal() should get obsolete. Most seem to use getString/getObject anyway... // But is it true? JDBC NPE exists since 13 years? // Null values are driving us nuts in JDBC: } if (bd == null || result.wasNull()) { return null; } if (scale >= 0) { bd = bd.setScale(scale, rounding); try { long l = bd.longValueExact(); // TODO argh if (Long.toString(l).equals(result.getString(i))) { return l; } else { return bd.doubleValue(); } } catch (ArithmeticException e) { return bd.doubleValue(); } } else { return bd.toPlainString(); } } /** * The JDBC type DOUBLE represents a "double precision" floating * point number that supports 15 digits of mantissa. * * The corresponding SQL type is DOUBLE PRECISION, which is defined * in SQL-92 and is widely supported by the major databases. The * SQL-92 standard leaves the precision of DOUBLE PRECISION up to * the implementation, but in practice all the major databases * supporting DOUBLE PRECISION support a mantissa precision of at * least 15 digits. * * The recommended Java mapping for the DOUBLE type is as a Java * double. 
*/ case Types.DOUBLE: { String s = result.getString(i); if (result.wasNull() || s == null) { return null; } NumberFormat format = NumberFormat.getInstance(locale); Number number = format.parse(s); return number.doubleValue(); } /** * The JDBC type FLOAT is basically equivalent to the JDBC type * DOUBLE. We provided both FLOAT and DOUBLE in a possibly misguided * attempt at consistency with previous database APIs. FLOAT * represents a "double precision" floating point number that * supports 15 digits of mantissa. * * The corresponding SQL type FLOAT is defined in SQL-92. The SQL-92 * standard leaves the precision of FLOAT up to the implementation, * but in practice all the major databases supporting FLOAT support * a mantissa precision of at least 15 digits. * * The recommended Java mapping for the FLOAT type is as a Java * double. However, because of the potential confusion between the * double precision SQL FLOAT and the single precision Java float, * we recommend that JDBC programmers should normally use the JDBC * DOUBLE type in preference to FLOAT. */ case Types.FLOAT: { String s = result.getString(i); if (result.wasNull() || s == null) { return null; } NumberFormat format = NumberFormat.getInstance(locale); Number number = format.parse(s); return number.doubleValue(); } /** * The JDBC type JAVA_OBJECT, added in the JDBC 2.0 core API, makes * it easier to use objects in the Java programming language as * values in a database. JAVA_OBJECT is simply a type code for an * instance of a class defined in the Java programming language that * is stored as a database object. The type JAVA_OBJECT is used by a * database whose type system has been extended so that it can store * Java objects directly. The JAVA_OBJECT value may be stored as a * serialized Java object, or it may be stored in some * vendor-specific format. 
* * The type JAVA_OBJECT is one of the possible values for the column * DATA_TYPE in the ResultSet objects returned by various * DatabaseMetaData methods, including getTypeInfo, getColumns, and * getUDTs. The method getUDTs, part of the new JDBC 2.0 core API, * will return information about the Java objects contained in a * particular schema when it is given the appropriate parameters. * Having this information available facilitates using a Java class * as a database type. */ case Types.OTHER: case Types.JAVA_OBJECT: { return result.getObject(i); } /** * The JDBC type REAL represents a "single precision" floating point * number that supports seven digits of mantissa. * * The corresponding SQL type REAL is defined in SQL-92 and is * widely, though not universally, supported by the major databases. * The SQL-92 standard leaves the precision of REAL up to the * implementation, but in practice all the major databases * supporting REAL support a mantissa precision of at least seven * digits. * * The recommended Java mapping for the REAL type is as a Java * float. */ case Types.REAL: { String s = result.getString(i); if (result.wasNull() || s == null) { return null; } NumberFormat format = NumberFormat.getInstance(locale); Number number = format.parse(s); return number.doubleValue(); } /** * The JDBC type TINYINT represents an 8-bit integer value between 0 * and 255 that may be signed or unsigned. * * The corresponding SQL type, TINYINT, is currently supported by * only a subset of the major databases. Portable code may therefore * prefer to use the JDBC SMALLINT type, which is widely supported. * * The recommended Java mapping for the JDBC TINYINT type is as * either a Java byte or a Java short. The 8-bit Java byte type * represents a signed value from -128 to 127, so it may not always * be appropriate for larger TINYINT values, whereas the 16-bit Java * short will always be able to hold all TINYINT values. 
*/ /** * The JDBC type SMALLINT represents a 16-bit signed integer value * between -32768 and 32767. * * The corresponding SQL type, SMALLINT, is defined in SQL-92 and is * supported by all the major databases. The SQL-92 standard leaves * the precision of SMALLINT up to the implementation, but in * practice, all the major databases support at least 16 bits. * * The recommended Java mapping for the JDBC SMALLINT type is as a * Java short. */ /** * The JDBC type INTEGER represents a 32-bit signed integer value * ranging between -2147483648 and 2147483647. * * The corresponding SQL type, INTEGER, is defined in SQL-92 and is * widely supported by all the major databases. The SQL-92 standard * leaves the precision of INTEGER up to the implementation, but in * practice all the major databases support at least 32 bits. * * The recommended Java mapping for the INTEGER type is as a Java * int. */ case Types.TINYINT: case Types.SMALLINT: case Types.INTEGER: { try { Integer integer = result.getInt(i); return result.wasNull() ? null : integer; } catch (SQLDataException e) { Long l = result.getLong(i); return result.wasNull() ? null : l; } } case Types.SQLXML: { SQLXML xml = result.getSQLXML(i); return xml != null ? xml.getString() : null; } case Types.NULL: { return null; } /** * The JDBC type DISTINCT field (Types class)>DISTINCT represents * the SQL3 type DISTINCT. * * The standard mapping for a DISTINCT type is to the Java type to * which the base type of a DISTINCT object would be mapped. For * example, a DISTINCT type based on a CHAR would be mapped to a * String object, and a DISTINCT type based on an SQL INTEGER would * be mapped to an int. * * The DISTINCT type may optionally have a custom mapping to a class * in the Java programming language. A custom mapping consists of a * class that implements the interface SQLData and an entry in a * java.util.Map object. 
*/ case Types.DISTINCT: { logger().warn("JDBC type not implemented: {}", type); return null; } /** * The JDBC type STRUCT represents the SQL99 structured type. An SQL * structured type, which is defined by a user with a CREATE TYPE * statement, consists of one or more attributes. These attributes * may be any SQL data type, built-in or user-defined. * * The standard mapping for the SQL type STRUCT is to a Struct * object in the Java programming language. A Struct object contains * a value for each attribute of the STRUCT value it represents. * * A STRUCT value may optionally be custom mapped to a class in the * Java programming language, and each attribute in the STRUCT may * be mapped to a field in the class. A custom mapping consists of a * class that implements the interface SQLData and an entry in a * java.util.Map object. * * */ case Types.STRUCT: { logger().warn("JDBC type not implemented: {}", type); return null; } case Types.REF: { logger().warn("JDBC type not implemented: {}", type); return null; } case Types.ROWID: { logger().warn("JDBC type not implemented: {}", type); return null; } default: { logger().warn("unknown JDBC type ignored: {}", type); return null; } } return null; } }
package net.java.sip.communicator.impl.gui.main.configforms;

import java.util.*;
import java.util.List;

import java.awt.*;
import java.awt.event.*;

import javax.swing.*;
import javax.swing.table.*;

import org.osgi.framework.*;

import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.impl.gui.customcontrols.*;
import net.java.sip.communicator.impl.gui.i18n.*;
import net.java.sip.communicator.impl.gui.main.*;
import net.java.sip.communicator.impl.gui.main.account.*;
import net.java.sip.communicator.impl.gui.utils.*;
import net.java.sip.communicator.impl.gui.utils.Constants;
import net.java.sip.communicator.service.configuration.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.*;

/**
 * The <tt>AccountsConfigurationForm</tt> is the form where the user
 * could create, modify or delete an account.
 *
 * @author Yana Stamcheva
 */
public class AccountsConfigurationForm
    extends JPanel
    implements ConfigurationForm, ActionListener, ServiceListener
{
    private Logger logger = Logger.getLogger(
        AccountsConfigurationForm.class.getName());

    private JScrollPane tablePane = new JScrollPane();

    private JTable accountsTable = new JTable();

    private JPanel rightPanel = new JPanel(new BorderLayout());

    private JPanel buttonsPanel = new JPanel(new GridLayout(0, 1, 8, 8));

    private JButton newButton = new JButton(Messages.getString("new"));

    private JButton modifyButton = new JButton(Messages.getString("modify"));

    private JButton removeButton = new JButton(Messages.getString("remove"));

    private ExtendedTableModel tableModel = new ExtendedTableModel();

    private MainFrame mainFrame;

    /**
     * Creates an instance of <tt>AccountsConfigurationForm</tt>.
     *
     * @param mainFrame the main application window
     */
    public AccountsConfigurationForm(MainFrame mainFrame)
    {
        super(new BorderLayout());

        this.mainFrame = mainFrame;

        GuiActivator.bundleContext.addServiceListener(this);

        this.tableInit();

        this.buttonsPanelInit();

        this.add(tablePane, BorderLayout.CENTER);
        this.add(rightPanel, BorderLayout.EAST);
    }

    /**
     * Initializes the buttons panel: wires the action listeners and
     * mnemonics, and lays the buttons out on the right side.
     */
    private void buttonsPanelInit()
    {
        this.newButton.addActionListener(this);
        this.modifyButton.addActionListener(this);
        this.removeButton.addActionListener(this);

        this.newButton.setMnemonic(
            Messages.getString("mnemonic.newAccount").charAt(0));
        this.modifyButton.setMnemonic(
            Messages.getString("mnemonic.modifyAccount").charAt(0));
        this.removeButton.setMnemonic(
            Messages.getString("mnemonic.removeAccount").charAt(0));

        this.buttonsPanel.add(newButton);
        this.buttonsPanel.add(modifyButton);
        this.buttonsPanel.add(removeButton);

        this.rightPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        this.rightPanel.add(buttonsPanel, BorderLayout.NORTH);
    }

    /**
     * Initializes the accounts table. The model carries three columns:
     * a hidden "id" column holding the <tt>ProtocolProviderService</tt>,
     * a protocol column and an account column.
     */
    private void tableInit()
    {
        accountsTable.setRowHeight(22);
        accountsTable.setSelectionMode(
            ListSelectionModel.SINGLE_SELECTION);
        accountsTable.setShowHorizontalLines(false);
        accountsTable.setShowVerticalLines(false);
        accountsTable.setModel(tableModel);

        tableModel.addColumn("id");
        tableModel.addColumn(Messages.getString("protocol"));
        tableModel.addColumn(Messages.getString("account"));

        TableColumnModel columnModel = accountsTable.getColumnModel();
        // hide the "id" column from the view; it stays in the model so
        // the provider can be looked up for the selected row
        columnModel.removeColumn(columnModel.getColumn(0));
        columnModel.getColumn(0)
            .setCellRenderer(new LabelTableCellRenderer());
        columnModel.getColumn(1)
            .setCellRenderer(new LabelTableCellRenderer());

        this.initializeAccountsTable();

        this.tablePane.getViewport().add(accountsTable);
    }

    /**
     * From all protocol provider factories obtains all already registered
     * accounts and adds them to the table.
     */
    private void initializeAccountsTable()
    {
        Set set = GuiActivator.getProtocolProviderFactories().entrySet();
        Iterator iter = set.iterator();
        while (iter.hasNext())
        {
            Map.Entry entry = (Map.Entry) iter.next();

            ProtocolProviderFactory providerFactory
                = (ProtocolProviderFactory) entry.getValue();

            ArrayList accountsList = providerFactory.getRegisteredAccounts();

            AccountID accountID;
            ServiceReference serRef;
            ProtocolProviderService protocolProvider;

            for (int i = 0; i < accountsList.size(); i++)
            {
                accountID = (AccountID) accountsList.get(i);

                serRef = providerFactory.getProviderForAccount(accountID);

                protocolProvider
                    = (ProtocolProviderService) GuiActivator.bundleContext
                        .getService(serRef);

                String pName = protocolProvider.getProtocolName();

                JLabel protocolLabel = new JLabel();
                protocolLabel.setText(pName);
                protocolLabel.setIcon(
                    new ImageIcon(Constants.getProtocolIcon(pName)));

                tableModel.addRow(new Object[]{protocolProvider,
                    protocolLabel, accountID.getUserID()});
            }
        }
    }

    /**
     * Returns the title of this configuration form.
     * @return the title of this configuration form.
     */
    public String getTitle()
    {
        return Messages.getString("accounts");
    }

    /**
     * Returns the icon of this configuration form.
     * @return the icon of this configuration form.
     */
    public byte[] getIcon()
    {
        return ImageLoader.getImageInBytes(
            ImageLoader.QUICK_MENU_ADD_ICON);
    }

    /**
     * Returns the form of this configuration form.
     * @return the form of this configuration form.
     */
    public Object getForm()
    {
        return this;
    }

    /**
     * Handles the <tt>ActionEvent</tt> triggered when user clicks on
     * one of the buttons. Shows the account registration wizard when user
     * clicks on "New" or "Modify", and uninstalls the selected account
     * (after confirmation) when the user clicks on "Remove".
     */
    public void actionPerformed(ActionEvent e)
    {
        JButton sourceButton = (JButton) e.getSource();

        if (sourceButton.equals(newButton))
        {
            showAccountWizard(null);
        }
        else if (sourceButton.equals(modifyButton))
        {
            if (accountsTable.getSelectedRow() != -1)
            {
                ProtocolProviderService protocolProvider
                    = (ProtocolProviderService) tableModel.getValueAt(
                        accountsTable.getSelectedRow(), 0);

                showAccountWizard(protocolProvider);
            }
        }
        else if (sourceButton.equals(removeButton))
        {
            if (accountsTable.getSelectedRow() != -1)
            {
                ProtocolProviderService protocolProvider
                    = (ProtocolProviderService) tableModel.getValueAt(
                        accountsTable.getSelectedRow(), 0);

                ProtocolProviderFactory providerFactory
                    = GuiActivator.getProtocolProviderFactory(protocolProvider);

                if (providerFactory != null)
                {
                    int result = JOptionPane.showConfirmDialog(this,
                        Messages.getString("removeAccountMessage"),
                        Messages.getString("removeAccount"),
                        JOptionPane.YES_NO_CANCEL_OPTION);

                    if (result == JOptionPane.YES_OPTION)
                    {
                        ConfigurationService configService
                            = GuiActivator.getConfigurationService();

                        String prefix
                            = "net.java.sip.communicator.impl.ui.accounts";

                        List accounts = configService
                            .getPropertyNamesByPrefix(prefix, true);

                        String removedAccountUID = protocolProvider
                            .getAccountID().getAccountUniqueID();

                        Iterator accountsIter = accounts.iterator();
                        while (accountsIter.hasNext())
                        {
                            String accountRootPropName
                                = (String) accountsIter.next();

                            String accountUID
                                = configService.getString(accountRootPropName);

                            // compare from the known non-null side so a
                            // stale property with a null value cannot
                            // abort the removal with a NullPointerException
                            if (removedAccountUID.equals(accountUID))
                            {
                                configService.setProperty(
                                    accountRootPropName, null);
                                break;
                            }
                        }

                        providerFactory.uninstallAccount(
                            protocolProvider.getAccountID());
                    }
                }
            }
        }
    }

    /**
     * Shows the account registration wizard, centered on the screen.
     * When <tt>protocolProvider</tt> is null a "new account" wizard is
     * opened, otherwise the wizard modifies the given provider's account.
     *
     * @param protocolProvider the provider to modify, or null to create
     *                         a new account
     */
    private void showAccountWizard(ProtocolProviderService protocolProvider)
    {
        AccountRegWizardContainerImpl wizard
            = (AccountRegWizardContainerImpl) GuiActivator.getUIService()
                .getAccountRegWizardContainer();

        wizard.setTitle(
            Messages.getString("accountRegistrationWizard"));

        wizard.setLocation(
            Toolkit.getDefaultToolkit().getScreenSize().width / 2 - 250,
            Toolkit.getDefaultToolkit().getScreenSize().height / 2 - 100);

        if (protocolProvider == null)
        {
            wizard.newAccount();
        }
        else
        {
            wizard.modifyAccount(protocolProvider);
        }

        wizard.showModalDialog();
    }

    /**
     * Implements the <tt>ServiceListener</tt> method. Verifies whether the
     * passed event concerns a <tt>ProtocolProviderService</tt> and adds the
     * corresponding UI controls.
     *
     * @param event The <tt>ServiceEvent</tt> object.
     */
    public void serviceChanged(ServiceEvent event)
    {
        Object service = GuiActivator.bundleContext
            .getService(event.getServiceReference());

        // we don't care if the source service is not a protocol provider
        if (!(service instanceof ProtocolProviderService))
        {
            return;
        }

        ProtocolProviderService pps = (ProtocolProviderService) service;

        if (event.getType() == ServiceEvent.REGISTERED)
        {
            String pName = pps.getProtocolName();

            JLabel protocolLabel = new JLabel();
            protocolLabel.setText(pName);
            protocolLabel.setIcon(
                new ImageIcon(Constants.getProtocolIcon(pName)));

            tableModel.addRow(new Object[]{pps, protocolLabel,
                pps.getAccountID().getUserID()});
        }
        else if (event.getType() == ServiceEvent.UNREGISTERING)
        {
            tableModel.removeRow(tableModel.rowIndexOf(pps));
        }
    }
}
package com.elytradev.movingworld.common.experiments; import com.elytradev.concrete.reflect.accessor.Accessor; import com.elytradev.concrete.reflect.accessor.Accessors; import com.elytradev.movingworld.client.experiments.InputReader; import com.elytradev.movingworld.client.experiments.MovingWorldClientDatabase; import com.elytradev.movingworld.common.experiments.network.messages.server.MessageDimensionPoolData; import com.elytradev.movingworld.common.experiments.region.RegionPool; import com.elytradev.movingworld.common.experiments.world.MWPlayerChunkMap; import com.elytradev.movingworld.common.experiments.world.MWServerWorldEventHandler; import com.elytradev.movingworld.common.experiments.world.MovingWorldProvider; import com.google.common.collect.HashBiMap; import net.minecraft.client.Minecraft; import net.minecraft.client.multiplayer.WorldClient; import net.minecraft.nbt.CompressedStreamTools; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.server.management.PlayerChunkMap; import net.minecraft.world.DimensionType; import net.minecraft.world.ServerWorldEventHandler; import net.minecraft.world.WorldServer; import net.minecraftforge.common.DimensionManager; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.world.WorldEvent; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.event.FMLServerStoppedEvent; import net.minecraftforge.fml.common.event.FMLServerStoppingEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.PlayerEvent; import net.minecraftforge.fml.common.gameevent.TickEvent; import net.minecraftforge.fml.relauncher.Side; import java.io.File; import java.io.IOException; import java.text.MessageFormat; import java.util.HashMap; /** * Handles init and deinit for subworld registration, region pool io, etc. 
*/ public class MovingWorldInitHandler { public static final int startingDimID = 50; /** * ParentWorldID->SubWorldID */ public static HashBiMap<Integer, Integer> registeredDimensions = HashBiMap.create(); public static int activeDimID = startingDimID; private Accessor<PlayerChunkMap> playerChunkMap; public MovingWorldInitHandler() { this.playerChunkMap = Accessors.findField(WorldServer.class, "playerChunkMap", "field_73063_M"); } @SubscribeEvent public void onWorldSave(WorldEvent.Save e) { // Save pool for dimension. if (!registeredDimensions.containsValue(e.getWorld().provider.getDimension())) return; int subWorldID = registeredDimensions.inverse().get(e.getWorld().provider.getDimension()); RegionPool poolForDim = RegionPool.getPool(subWorldID, false); savePoolToFile(e.getWorld().provider.getDimension(), poolForDim); } private void savePoolToFile(int parentDimension, RegionPool pool) { File saveDir = DimensionManager.getCurrentSaveRootDirectory(); if (saveDir == null || pool == null) return; File activePoolDir = new File(saveDir, "movingworld-pools"); File oldPoolDir = new File(activePoolDir, "old"); if (!activePoolDir.exists()) { activePoolDir.mkdir(); } if (!oldPoolDir.exists()) { oldPoolDir.mkdir(); } try { NBTTagCompound poolCompound = pool.writePoolToCompound(); File regionPool = new File(activePoolDir, MessageFormat.format("poolD{0}.dat", parentDimension)); if (regionPool.exists()) { regionPool.renameTo(new File(oldPoolDir, MessageFormat.format("poolD{0}.dat", parentDimension))); } else { regionPool.createNewFile(); } regionPool = new File(activePoolDir, MessageFormat.format("poolD{0}.dat", parentDimension)); CompressedStreamTools.write(poolCompound, regionPool); } catch (IOException e) { MovingWorldExperimentsMod.logger.error(MessageFormat.format("Failed to write pool data from file for dimension {0}", parentDimension), e); } } private boolean isPoolDataPresent(int parentDimension) { File saveDir = DimensionManager.getCurrentSaveRootDirectory(); if (saveDir 
== null) return false; File activePoolDir = new File(saveDir, "movingworld-pools"); return new File(activePoolDir, MessageFormat.format("poolD{0}.dat", parentDimension)).exists(); } private void readPoolFromFile(int parentDimension, int subWorldDimension) { File saveDir = DimensionManager.getCurrentSaveRootDirectory(); if (saveDir == null) return; File activePoolDir = new File(saveDir, "movingworld-pools"); try { File regionPool = new File(activePoolDir, MessageFormat.format("poolD{0}.dat", parentDimension)); NBTTagCompound poolCompound = CompressedStreamTools.read(regionPool); RegionPool.getPool(subWorldDimension, true).readPoolFromCompound(poolCompound); } catch (IOException e) { MovingWorldExperimentsMod.logger.error(MessageFormat.format("Failed to read pool data from file for dimension {0}", parentDimension)); } } @SubscribeEvent public void onWorldLoad(WorldEvent.Load e) { Integer loadedDimensionID = e.getWorld().provider.getDimension(); if (registeredDimensions.containsKey(loadedDimensionID) || registeredDimensions.containsValue(loadedDimensionID)) { return; } try { // Register new dimension registeredDimensions.put(loadedDimensionID, activeDimID); DimensionManager.registerDimension(activeDimID, DimensionType.register("MovingWorld|P" + loadedDimensionID + "|C" + activeDimID, "movingworld", activeDimID, MovingWorldProvider.class, true)); DimensionManager.initDimension(activeDimID); WorldServer worldServer = DimensionManager.getWorld(activeDimID); // Inject set MW component alternatives. playerChunkMap.set(worldServer, new MWPlayerChunkMap(worldServer)); ServerWorldEventHandler currentEventListener = (ServerWorldEventHandler) worldServer.eventListeners.stream() .filter((ev) -> ev instanceof ServerWorldEventHandler).findFirst().get(); worldServer.eventListeners.remove(currentEventListener); worldServer.addEventListener(new MWServerWorldEventHandler(worldServer.mcServer, worldServer)); // Init pool, increment dimension number. 
if (isPoolDataPresent(e.getWorld().provider.getDimension())) readPoolFromFile(e.getWorld().provider.getDimension(), activeDimID); RegionPool.getPool(activeDimID, true); worldServer.addEventListener(new BoundingBoxWorldListener()); MovingWorldExperimentsMod.logger.info("DB check: " + MovingWorldExperimentsMod.modProxy.getCommonDB().getWorldFromDim(activeDimID)); activeDimID++; } catch (Exception exception) { MovingWorldExperimentsMod.logger.error("Exception on subworld registration/load ", e); } } @Mod.EventHandler public void onWorldUnload(WorldEvent.Unload e){ if (e.getWorld().isRemote && registeredDimensions.containsKey(e.getWorld().provider.getDimension())) { int subworldID = registeredDimensions.get(e.getWorld().provider.getDimension()); ((MovingWorldClientDatabase) MovingWorldExperimentsMod.modProxy.getClientDB()).worlds.remove(subworldID); } } @Mod.EventHandler public void onServerStopped(FMLServerStoppedEvent e) { MovingWorldInitHandler.registeredDimensions.forEach((parent, child) -> DimensionManager.unregisterDimension(child)); MovingWorldInitHandler.registeredDimensions = HashBiMap.create(); MovingWorldInitHandler.activeDimID = MovingWorldInitHandler.startingDimID; } @Mod.EventHandler public void onServerStopping(FMLServerStoppingEvent e) { registeredDimensions.forEach((parentWorld, subWorld) -> savePoolToFile(parentWorld, RegionPool.getPool(subWorld, false))); } @SubscribeEvent public void onConnection(PlayerEvent.PlayerLoggedInEvent e) { if (!e.isCanceled() && e.player != null && !e.player.world.isRemote) { new MessageDimensionPoolData(e.player.world.provider.getDimension(), RegionPool.getPool(e.player.world.provider.getDimension(), true).writePoolToCompound()).sendTo(e.player); } } @SubscribeEvent public void onDimChange(PlayerEvent.PlayerChangedDimensionEvent e) { if (!e.isCanceled() && e.player != null && !e.player.world.isRemote) { new MessageDimensionPoolData(e.toDim, RegionPool.getPool(e.toDim, true).writePoolToCompound()).sendTo(e.player); } } 
//TODO: This shouldn't be in the init handler. @SubscribeEvent public void onClientTick(TickEvent.ClientTickEvent e) { if (e.phase == TickEvent.Phase.START) { Minecraft mc = Minecraft.getMinecraft(); MovingWorldClientDatabase cDB = (MovingWorldClientDatabase) MovingWorldExperimentsMod.modProxy.getClientDB(); if (mc.isGamePaused() || mc.world == null || mc.player == null) { if ((mc.world == null || mc.player == null || mc.playerController == null) && InputReader.INSTANCE != null) { MinecraftForge.EVENT_BUS.unregister(InputReader.INSTANCE); InputReader.INSTANCE = null; } return; } if (InputReader.INSTANCE == null) { InputReader.INSTANCE = new InputReader(mc.playerController); } for (HashMap.Entry<Integer, WorldClient> mapEntry : cDB.worlds.entrySet()) { mapEntry.getValue().updateEntities(); mapEntry.getValue().tick(); } } } }
package uk.ac.ebi.atlas.trader.loader;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import uk.ac.ebi.atlas.experimentimport.ExperimentDTO;
import uk.ac.ebi.atlas.model.*;
import uk.ac.ebi.atlas.model.baseline.BaselineExperiment;
import uk.ac.ebi.atlas.model.baseline.BaselineExperimentConfiguration;
import uk.ac.ebi.atlas.model.baseline.ExperimentalFactors;
import uk.ac.ebi.atlas.model.baseline.ExperimentalFactorsFactory;
import uk.ac.ebi.atlas.trader.ConfigurationTrader;
import uk.ac.ebi.atlas.trader.SpeciesKingdomTrader;

import java.util.Collections;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;

/**
 * Unit test for {@code BaselineExperimentsCacheLoader}: verifies that loading a
 * baseline experiment DTO consults the expected collaborators and wires up
 * alternative views from the baseline configuration.
 */
@RunWith(MockitoJUnitRunner.class)
public class BaselineExperimentCacheLoaderTest {

    // Minimal concrete subclass so the (abstract) loader under test can be instantiated.
    class Loader extends BaselineExperimentsCacheLoader {
        protected Loader(ExperimentalFactorsFactory experimentalFactorsFactory,
                         ExperimentType experimentType,
                         BaselineExperimentExpressionLevelFile expressionLevelFile,
                         ConfigurationTrader configurationTrader,
                         SpeciesKingdomTrader speciesKingdomTrader) {
            super(experimentalFactorsFactory, experimentType, expressionLevelFile,
                    configurationTrader, speciesKingdomTrader);
        }
    }

    // Fixture: a mock RNA-seq baseline experiment for human with no PubMed ids.
    String experimentAccession = "E-MOCK-1";
    ExperimentType experimentType = ExperimentType.RNASEQ_MRNA_BASELINE;
    ExperimentDTO dto = new ExperimentDTO(experimentAccession, experimentType, "homo_sapiens",
            Collections.<String>emptySet(), "mock experiment", false);

    @Mock
    ExperimentalFactorsFactory experimentalFactorsFactory;
    @Mock
    BaselineExperimentExpressionLevelFile expressionLevelFile;
    @Mock
    ConfigurationTrader configurationTrader;
    @Mock
    SpeciesKingdomTrader speciesKingdomTrader;
    @Mock
    ExperimentConfiguration configuration;
    @Mock
    BaselineExperimentConfiguration baselineConfiguration;
    @Mock
    ExperimentalFactors experimentalFactors;
    @Mock
    AssayGroups assayGroups;
    @Mock
    ExperimentDesign experimentDesign;

    // Object under test.
    BaselineExperimentsCacheLoader subject;

    @Before
    public void setUp() {
        subject = new Loader(experimentalFactorsFactory, experimentType, expressionLevelFile,
                configurationTrader, speciesKingdomTrader);
        // Happy-path stubbing: configuration lookups, one non-empty assay group,
        // species metadata, and the factory returning the mocked factors.
        when(configurationTrader.getExperimentConfiguration(experimentAccession)).thenReturn(configuration);
        when(configurationTrader.getBaselineFactorsConfiguration(experimentAccession)).thenReturn(baselineConfiguration);
        when(configuration.getAssayGroups()).thenReturn(assayGroups);
        when(assayGroups.getAssayGroupIds()).thenReturn(ImmutableSet.of("assay group id 1"));
        when(speciesKingdomTrader.getKingdom(dto.getSpecies())).thenReturn("kingdom");
        when(speciesKingdomTrader.getEnsemblDB(dto.getSpecies())).thenReturn("ensembl_db");
        when(experimentalFactorsFactory.createExperimentalFactors(eq(experimentAccession), eq(experimentDesign),
                eq(baselineConfiguration), eq(assayGroups), any(String[].class), anyBoolean()))
                .thenReturn(experimentalFactors);
    }

    // Asserts the full set of interactions expected from a single load() call.
    private void verifyCollaborators() {
        verify(configurationTrader).getExperimentConfiguration(experimentAccession);
        verify(configurationTrader).getBaselineFactorsConfiguration(experimentAccession);
        verify(configuration).getAssayGroups();
        verify(experimentalFactorsFactory).createExperimentalFactors(eq(experimentAccession), eq(experimentDesign),
                eq(baselineConfiguration), eq(assayGroups), any(String[].class), anyBoolean());
        verify(speciesKingdomTrader).getKingdom(dto.getSpecies());
        verify(speciesKingdomTrader).getEnsemblDB(dto.getSpecies());
        // NOTE(review): orderCurated() on an unstubbed mock returns false, so this
        // branch always runs here — confirm that matches the loader's behaviour.
        if (!baselineConfiguration.orderCurated()) {
            verify(expressionLevelFile).readOrderedAssayGroupIds(experimentAccession);
        }
    }

    // Guards against the loader talking to collaborators beyond those verified above.
    private void noMoreInteractionsWithCollaborators() {
        verifyNoMoreInteractions(experimentalFactorsFactory, expressionLevelFile,
                configurationTrader, speciesKingdomTrader);
    }

    // A fresh AssayGroups mock has no ids, which the loader must reject.
    @Test(expected = IllegalStateException.class)
    public void assayGroupsShouldBeNonEmpty() throws Exception {
        when(configuration.getAssayGroups()).thenReturn(Mockito.mock(AssayGroups.class));
        BaselineExperiment e = subject.load(dto, "description from array express", false, experimentDesign);
    }

    @Test
    public void useAllCollaborators() throws Exception {
        BaselineExperiment e = subject.load(dto, "description from array express", false, experimentDesign);
        verifyCollaborators();
        noMoreInteractionsWithCollaborators();
    }

    @Test
    public void noAlternativeViewsForTypicalExperiment() throws Exception {
        BaselineExperiment e = subject.load(dto, "description from array express", false, experimentDesign);
        assertThat(e.alternativeViews(), hasSize(0));
        verifyCollaborators();
        noMoreInteractionsWithCollaborators();
    }

    // When the baseline configuration lists an alternative view, the loaded
    // experiment must expose it with a description derived from the other
    // experiment's default query factor.
    @Test
    public void alternativeViews() throws Exception {
        String alternativeViewAccession = "E-MOCK-2";
        when(baselineConfiguration.getAlternativeViews()).thenReturn(ImmutableList.of(alternativeViewAccession));
        BaselineExperimentConfiguration alternativeViewBaselineConfiguration =
                mock(BaselineExperimentConfiguration.class);
        when(configurationTrader.getBaselineFactorsConfiguration(alternativeViewAccession))
                .thenReturn(alternativeViewBaselineConfiguration);
        String s = "default query factor of other experiment";
        when(alternativeViewBaselineConfiguration.getDefaultQueryFactorType()).thenReturn(s);

        BaselineExperiment e = subject.load(dto, "description from array express", false, experimentDesign);

        assertThat(e.alternativeViews(), hasSize(1));
        assertThat(e.alternativeViews().get(0).getLeft(), is(alternativeViewAccession));
        assertThat(e.alternativeViews().get(0).getRight(), containsString(s));
        verifyCollaborators();
        verify(baselineConfiguration, atLeastOnce()).getAlternativeViews();
        verify(alternativeViewBaselineConfiguration).getDefaultQueryFactorType();
        verify(configurationTrader).getBaselineFactorsConfiguration(alternativeViewAccession);
        noMoreInteractionsWithCollaborators();
    }
}
package org.herac.tuxguitar.player.impl.jsa.midiport;

import java.io.File;

import javax.sound.midi.Instrument;
import javax.sound.midi.MidiChannel;
import javax.sound.midi.MidiSystem;
import javax.sound.midi.Soundbank;
import javax.sound.midi.Synthesizer;

import org.herac.tuxguitar.gui.TuxGuitar;
import org.herac.tuxguitar.player.base.MidiControllers;
import org.herac.tuxguitar.player.base.MidiOutputPort;
import org.herac.tuxguitar.player.base.MidiPlayerException;
import org.herac.tuxguitar.player.base.MidiReceiver;
import org.herac.tuxguitar.player.impl.jsa.assistant.SBAssistant;
import org.herac.tuxguitar.player.impl.jsa.utils.MidiConfigUtils;

/**
 * MIDI output port backed by a javax.sound.midi {@link Synthesizer}.
 * Opens the synthesizer lazily and tries, in order: the user-configured
 * soundbank file, the synthesizer's default soundbank, and finally the
 * SBAssistant download/install wizard.
 */
public class MidiPortSynthesizer extends MidiOutputPort{
	private Synthesizer synthesizer;     // the wrapped JSA synthesizer
	private MidiReceiver receiver;       // forwards MIDI messages to the synth channels
	private boolean synthesizerLoaded;   // true once the synthesizer has been opened
	private boolean soundbankLoaded;     // true once instruments are available to play

	public MidiPortSynthesizer(Synthesizer synthesizer){
		// Device name doubles as both the port key and display name.
		super(synthesizer.getDeviceInfo().getName(),synthesizer.getDeviceInfo().getName());
		this.synthesizer = synthesizer;
		this.receiver = new MidiPortSynthesizerReceiver(this);
	}

	/** Opens (and lazily initialises) the synthesizer. */
	public void open(){
		getSynthesizer();
	}

	/** Unloads all instruments and closes the synthesizer if it is open. */
	public void close(){
		if(this.synthesizer != null && this.synthesizer.isOpen()){
			this.unloadSoundbank();
			this.synthesizer.close();
			// NOTE(review): synthesizerLoaded is not reset here, so
			// isSynthesizerLoaded() stays true after close — confirm intended.
		}
	}

	public MidiReceiver getReceiver(){
		return this.receiver;
	}

	/**
	 * Validates that the port is usable.
	 *
	 * @throws MidiPlayerException when the synthesizer could not be opened or
	 *                             no soundbank instruments are available
	 */
	public void check() throws MidiPlayerException{
		if(!isSynthesizerLoaded()){
			throw new MidiPlayerException(TuxGuitar.getProperty("jsa.error.midi.unavailable"));
		}
		if(!isSoundbankLoaded( true )){
			throw new MidiPlayerException(TuxGuitar.getProperty("jsa.error.soundbank.unavailable"));
		}
	}

	/**
	 * Lazily opens the synthesizer and ensures a soundbank is loaded,
	 * falling back through: configured path -> default soundbank -> SBAssistant.
	 * Errors are swallowed after printing; callers should use check().
	 */
	public Synthesizer getSynthesizer() {
		try {
			if(!this.synthesizer.isOpen()){
				this.synthesizer.open();

				if(!isSoundbankLoaded( false )){
					// 1) user-configured soundbank file
					String path = MidiConfigUtils.getSoundbankPath();
					if(path != null){
						this.loadSoundbank(new File(path));
					}
					// 2) the synthesizer's built-in default soundbank
					if(!isSoundbankLoaded( true )){
						this.loadSoundbank( this.synthesizer.getDefaultSoundbank() );
					}
					// 3) interactive soundbank assistant as a last resort
					if(!isSoundbankLoaded( true )){
						new SBAssistant(this).process();
					}
				}
			}
			this.synthesizerLoaded = this.synthesizer.isOpen();
		} catch (Throwable throwable) {
			throwable.printStackTrace();
		}
		return this.synthesizer;
	}

	/**
	 * Loads a soundbank from a file.
	 *
	 * @return true when the soundbank was loaded successfully
	 */
	public boolean loadSoundbank(File file){
		try {
			return loadSoundbank( MidiSystem.getSoundbank(file) );
		}catch (Throwable throwable) {
			// Wrapped only to get the localized message into the trace; not thrown.
			new MidiPlayerException(TuxGuitar.getProperty("jsa.error.soundbank.custom"),throwable).printStackTrace();
		}
		return false;
	}

	/**
	 * Loads all instruments of the given soundbank, replacing any previous one.
	 *
	 * @return true when the soundbank's instruments were loaded
	 */
	public boolean loadSoundbank(Soundbank sb) {
		try {
			if (sb != null && getSynthesizer().isSoundbankSupported(sb)){
				//unload the old soundbank
				this.unloadSoundbank();
				//load all soundbank instruments
				this.soundbankLoaded = getSynthesizer().loadAllInstruments(sb);
			}
		}catch (Throwable throwable) {
			throwable.printStackTrace();
		}
		return this.soundbankLoaded;
	}

	/** Unloads every available and every loaded instrument from the synthesizer. */
	public void unloadSoundbank(){
		try {
			this.soundbankLoaded = false;
			//unload all available instruments
			Instrument[] available = this.synthesizer.getAvailableInstruments();
			if(available != null){
				for(int i = 0; i < available.length; i++){
					getSynthesizer().unloadInstrument(available[i]);
				}
			}
			//unload all loaded instruments
			Instrument[] loaded = this.synthesizer.getLoadedInstruments();
			if(loaded != null){
				for(int i = 0; i < loaded.length; i++){
					getSynthesizer().unloadInstrument(loaded[i]);
				}
			}
		}catch (Throwable throwable) {
			throwable.printStackTrace();
		}
	}

	public boolean isSynthesizerLoaded(){
		return this.synthesizerLoaded;
	}

	/**
	 * @param checkSynth when true, re-derive the flag from the synthesizer's
	 *                   current loaded/available instrument lists
	 */
	public boolean isSoundbankLoaded(boolean checkSynth){
		if( checkSynth ){
			Instrument[] loaded = this.synthesizer.getLoadedInstruments();
			Instrument[] available = this.synthesizer.getAvailableInstruments();
			this.soundbankLoaded = ( (loaded != null && loaded.length > 0) || (available != null && available.length > 0) );
		}
		return this.soundbankLoaded;
	}
}

/**
 * MidiReceiver implementation that translates port-level MIDI calls into
 * {@link MidiChannel} operations on the owning synthesizer.
 */
class MidiPortSynthesizerReceiver implements MidiReceiver{

	private MidiPortSynthesizer port;  // owning port; supplies the synthesizer
	private MidiChannel[] channels;    // lazily cached channel array

	public MidiPortSynthesizerReceiver(MidiPortSynthesizer port){
		this.port = port;
	}

	// Channels are fetched once and cached; getSynthesizer() may open the device.
	private MidiChannel[] getChannels(){
		if(this.channels == null && this.port.getSynthesizer() != null){
			this.channels = this.port.getSynthesizer().getChannels();
		}
		return this.channels;
	}

	/** Resets all controllers on every channel. */
	public void sendSystemReset(){
		if(getChannels() != null){
			for(int i = 0;i < getChannels().length; i ++){
				getChannels()[i].resetAllControllers();
			}
		}
	}

	/** Sends the ALL_NOTES_OFF controller to every channel. */
	public void sendAllNotesOff(){
		if(getChannels() != null){
			for(int channel = 0;channel < getChannels().length;channel ++){
				sendControlChange(channel, MidiControllers.ALL_NOTES_OFF,0);
			}
		}
	}

	public void sendNoteOn(int channel, int key, int velocity){
		if(getChannels() != null && channel >= 0 && channel < getChannels().length){
			getChannels()[channel].noteOn(key, velocity);
		}
	}

	public void sendNoteOff(int channel, int key, int velocity){
		if(getChannels() != null && channel >= 0 && channel < getChannels().length){
			getChannels()[channel].noteOff(key, velocity);
		}
	}

	public void sendControlChange(int channel, int controller, int value){
		if(getChannels() != null && channel >= 0 && channel < getChannels().length){
			getChannels()[channel].controlChange(controller, value);
		}
	}

	public void sendProgramChange(int channel, int value){
		if(getChannels() != null && channel >= 0 && channel < getChannels().length){
			getChannels()[channel].programChange(value);
		}
	}

	public void sendPitchBend(int channel, int value){
		if(getChannels() != null && channel >= 0 && channel < getChannels().length){
			// presumably scales a 7-bit bend value into the 14-bit range used by
			// MidiChannel.setPitchBend (0..16383, centre 8192) — confirm caller range
			getChannels()[channel].setPitchBend( (value * 128) );
		}
	}
}
package io.usersource.annoplugin.gesture;

import io.usersource.annoplugin.R;
import io.usersource.annoplugin.utils.PluginUtils;
import io.usersource.annoplugin.utils.ScreenshotUtils;
import io.usersource.annoplugin.utils.ViewUtils;
import io.usersource.annoplugin.view.AnnoMainActivity;
import io.usersource.annoplugin.view.FeedbackEditActivity;
import io.usersource.annoplugin.view.FeedbackViewActivity;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;

import android.app.Activity;
import android.content.Intent;
import android.gesture.Gesture;
import android.gesture.GestureLibraries;
import android.gesture.GestureLibrary;
import android.gesture.GestureOverlayView;
import android.gesture.GestureOverlayView.OnGesturePerformedListener;
import android.gesture.Prediction;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Environment;
import android.util.Log;

/**
 * Screenshot gesture listener, detect and process spiral gesture.
 *
 * @author topcircler
 */
public class ScreenshotGestureListener implements OnGesturePerformedListener {

  private static final String TAG = "ScreenshotGestureListener";
  // Fully-qualified activity launched with the captured screenshot.
  private static final String FEEDBACK_ACTIVITY = "io.usersource.annoplugin.view.FeedbackEditActivity";
  // Gesture names in the library that count as the spiral trigger.
  private static final String GESTURE_NAME_PATTERN = "UserSource spiral[0-9]";
  private static final String SCREENSHOTS_DIR_NAME = "Screenshots";

  private Activity activity;                    // host activity whose screen is captured
  private GestureLibrary gestureLibrary = null; // pre-recorded spiral gestures

  public ScreenshotGestureListener(Activity activity, int rawResourceId) {
    this.activity = activity;
    gestureLibrary = GestureLibraries.fromRawResource(activity, rawResourceId);
    gestureLibrary.load();
  }

  /*
   * (non-Javadoc)
   *
   * @see android.gesture.GestureOverlayView.OnGesturePerformedListener#
   * onGesturePerformed(android.gesture.GestureOverlayView,
   * android.gesture.Gesture)
   */
  @Override
  public void onGesturePerformed(GestureOverlayView arg0, Gesture gesture) {
    // Guard against recursive capture: once two feedback levels are open,
    // the gesture is ignored.
    int level = 0;
    if (activity instanceof FeedbackEditActivity) {
      level = ((FeedbackEditActivity) activity).getLevel();
    } else if (activity instanceof FeedbackViewActivity) {
      level = ((FeedbackViewActivity) activity).getLevel();
    } else if (activity instanceof AnnoMainActivity) {
      level = ((AnnoMainActivity) activity).getLevel();
    }
    if (level >= 2) {
      Log.d(TAG, "Already 2 levels, no recursive any more.");
      return;
    }

    ArrayList<Prediction> predictions = gestureLibrary.recognize(gesture);
    if (predictions != null) {
      for (Prediction prediction : predictions) {
        // Only named spiral gestures with a sufficiently high score trigger
        // a screenshot; first match wins.
        if (prediction.name.matches(GESTURE_NAME_PATTERN)) {
          if (prediction.score > 1) {
            String screenshotPath;
            try {
              screenshotPath = takeScreenshot();
              launchAnnoPlugin(screenshotPath);
            } catch (FileNotFoundException e) {
              Log.e(TAG, e.getMessage(), e);
              ViewUtils.displayError(activity, R.string.fail_take_screenshot);
            } catch (IOException e) {
              // NOTE(review): unlike the branch above, the exception object is
              // not passed to Log.e here, so the stack trace is lost.
              Log.e(TAG, e.getMessage());
              ViewUtils.displayError(activity, R.string.fail_take_screenshot);
            }
            break;
          }
        }
      }
    }
  }

  // Starts the feedback activity with the screenshot attached.
  // (Truncated in this chunk: the remainder of the method is outside view.)
  private void launchAnnoPlugin(String screenshotPath) {
    String packageName = activity.getPackageName();
    Intent intent = new Intent(Intent.ACTION_SEND);
    intent.setClassName(packageName, FEEDBACK_ACTIVITY);
package uk.ac.ebi.quickgo.annotation.model;

import uk.ac.ebi.quickgo.annotation.common.document.AnnotationFields;
import uk.ac.ebi.quickgo.common.validator.GeneProductIDList;
import uk.ac.ebi.quickgo.rest.search.filter.RequestFilter;

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.Pattern;

/**
 * Holds the request parameters of an annotation search: paging values plus a
 * map of field filters (each filter value is a comma-separated list) that can
 * be converted into {@link RequestFilter} instances.
 */
public class AnnotationRequest {
    public static final int DEFAULT_ENTRIES_PER_PAGE = 25;
    public static final int MAX_ENTRIES_PER_PAGE = 100;

    private static final String VALUE_SEPARATOR = ",";
    private static final int DEFAULT_PAGE_NUMBER = 1;

    //TODO: change the way the field is referenced
    private static final String ASPECT_PARAM = "aspect";

    //Non-data parameters
    @Min(0) @Max(MAX_ENTRIES_PER_PAGE)
    private int limit = DEFAULT_ENTRIES_PER_PAGE;

    @Min(1)
    private int page = DEFAULT_PAGE_NUMBER;

    /** Field name -> comma-separated values requested for that field. */
    private final Map<String, String> filters = new HashMap<>();

    /**
     * E.g. ASPGD,Agbase,..
     * In the format assignedBy=ASPGD,Agbase
     */
    public void setAssignedBy(String assignedBy) {
        filters.put(AnnotationFields.ASSIGNED_BY, assignedBy);
    }

    @Pattern(regexp = "^[A-Za-z][A-Za-z\\-_]+(,[A-Za-z][A-Za-z\\-_]+)*")
    public String getAssignedBy() {
        return filters.get(AnnotationFields.ASSIGNED_BY);
    }

    /** Stores the requested aspect, lower-cased; ignores null. */
    public void setAspect(String aspect) {
        if (aspect == null) {
            return;
        }
        filters.put(ASPECT_PARAM, aspect.toLowerCase());
    }

    @Pattern(regexp = "(?i)biological_process|molecular_function|cellular_component")
    public String getAspect() {
        return filters.get(ASPECT_PARAM);
    }

    /**
     * Gene Product IDs, in CSV format.
     */
    public void setGeneProductId(String listOfGeneProductIDs) {
        if (listOfGeneProductIDs == null) {
            return;
        }
        filters.put(AnnotationFields.GENE_PRODUCT_ID, listOfGeneProductIDs);
    }

    @GeneProductIDList
    public String getGeneProductId() {
        return filters.get(AnnotationFields.GENE_PRODUCT_ID);
    }

    public void setPage(int page) {
        this.page = page;
    }

    public void setLimit(int limit) {
        this.limit = limit;
    }

    public int getLimit() {
        return limit;
    }

    public int getPage() {
        return page;
    }

    /**
     * Converts every stored filter into a {@link RequestFilter}, splitting its
     * comma-separated value string into individual values.
     */
    public Stream<RequestFilter> convertToFilters() {
        return filters.entrySet()
                .stream()
                .map(entry -> new RequestFilter(entry.getKey(), entry.getValue().split(VALUE_SEPARATOR)));
    }
}
package com.daviancorp.android.data.database; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.HashMap; import android.app.DownloadManager; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.SQLException; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteException; import android.database.sqlite.SQLiteOpenHelper; import android.database.sqlite.SQLiteQueryBuilder; import android.util.Log; import com.daviancorp.android.data.classes.Wishlist; import com.daviancorp.android.data.classes.WishlistComponent; import com.daviancorp.android.data.classes.WishlistData; import com.readystatesoftware.sqliteasset.SQLiteAssetHelper; /* QUERY REFERENCE: For queries with no JOINs: - call wrapHelper() - set values for _Distinct _Table _Columns _Selection _SelectionArgs _GroupBy _Having _OrderBy _Limit For queries with JOINs: - call wrapJoinHelper(SQLiteQueryBuilder qb) = set values for _Columns _Selection _SelectionArgs _GroupBy _Having _OrderBy _Limit */ class MonsterHunterDatabaseHelper extends SQLiteAssetHelper { private static final String TAG = "MonsterHunterDatabaseHelper"; private static MonsterHunterDatabaseHelper mInstance = null; //The Android's default system path of your application database. 
// /data/data/com.daviancorp.android.monsterhunter4udatabase/databases/
/*private static String DB_PATH = "/data/data/com.daviancorp.android.mh4udatabase/databases/";
private static String DB_NAME = "mh4u.db";
private static String DB_TEMP_NAME = "mh4u_temp.db";
private static String ASSETS_DB_FOLDER = "db";
private static final int VERSION = 16; // EDIT*/

// Database file shipped in assets; bump DATABASE_VERSION to trigger a forced upgrade.
private static final String DATABASE_NAME = "mh4u.db";
private static final int DATABASE_VERSION = 4;

private final Context myContext;
// Cached writable handle set by openDatabase(); closed in close().
private SQLiteDatabase myDataBase;

/**
 * Returns Singleton instance of the helper object.
 *
 * @param c Application context
 * @return Singleton instance of helper
 */
public static MonsterHunterDatabaseHelper getInstance(Context c) {
    // Use the application context, which will ensure that you
    // don't accidentally leak an Activity's context.
    if (mInstance == null) {
        mInstance = new MonsterHunterDatabaseHelper(c.getApplicationContext());
    }
    return mInstance;
}

/**
 * Initialize the helper object.
 * setForcedUpgrade() (SQLiteAssetHelper) overwrites the local database from the
 * bundled asset whenever DATABASE_VERSION increases — NOTE(review): this discards
 * user tables unless the library is configured to preserve them; confirm.
 *
 * @param context application context (stored, never an Activity)
 */
private MonsterHunterDatabaseHelper(Context context) {
    super(context, DATABASE_NAME, null, DATABASE_VERSION);
    myContext = context;
    setForcedUpgrade();
    /*try { createDatabase(); } catch (IOException e) { throw new Error("Error copying database"); }*/
}

/* Legacy pre-SQLiteAssetHelper implementation, retained for reference only:
 * createDatabase()  - created an empty DB, copied the asset DB over it, and
 *                     seeded the default row:
 *                     INSERT INTO 'wishlist' (`_id`, `name`) VALUES (1, 'My Wishlist');
 * checkDatabase()   - tested whether DB_PATH + DB_NAME already existed.
 * copyDatabase() /
 * copyTempDatabase()- streamed every file in the assets "db" folder into
 *                     DB_PATH + DB_NAME (resp. DB_TEMP_NAME) in 1024-byte chunks.
 * All superseded by setForcedUpgrade(); full code is in version control history.
 */

/**
 * Set database instance (caches a writable handle in myDataBase).
 *
 * @throws SQLException if the database cannot be opened for writing
 */
public void openDatabase() throws SQLException {
    myDataBase = getWritableDatabase();
}

/* Legacy openTempDatabase(): opened DB_PATH + DB_TEMP_NAME read/write with
 * NO_LOCALIZED_COLLATORS for the manual upgrade path below. Retained in VCS. */

/**
 * Close database: releases the cached handle (if any) before the superclass close.
 */
@Override
public synchronized void close() {
    if (myDataBase != null)
        myDataBase.close();
    super.close();
}

//@Override
//public void onCreate(SQLiteDatabase db) { }

/* Legacy onUpgrade(SQLiteDatabase, int, int): copied the wishlist, wishlist_data
 * and wishlist_component rows into a fresh temp copy of the asset DB
 * (queryWishlists/queryWishlistsData/queryWishlistsComponent against the passed-in
 * db — it must NOT call getReadableDatabase() or it would recurse), then replaced
 * the live DB file with the temp file using streams, because delete/rename
 * doesn't seem to work correctly. Full legacy code is in version control history. */

//@Override
//public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) { }

//removed getWritableDatabase() and getReadableDatabase() overrides as they broke
//functionality such as onUpgrade()

/**
 * Builds a "?,?,...,?" placeholder list of the given length for IN () clauses.
 *
 * @param len number of placeholders; must be >= 1
 * @throws RuntimeException when len < 1
 */
private String makePlaceholders(int len) {
    if (len < 1) {
        // It will lead to an invalid query anyway ..
        throw new RuntimeException("No placeholders");
    } else {
        StringBuilder sb = new StringBuilder(len * 2 - 1);
        sb.append("?");
        for (int i = 1; i < len; i++) {
            sb.append(",?");
        }
        return sb.toString();
    }
}

/*
 * Helper method: used for queries that has no JOINs
 */
private Cursor wrapHelper(QueryHelper qh) {
    return getReadableDatabase().query(qh.Distinct, qh.Table, qh.Columns, qh.Selection, qh.SelectionArgs, qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);
}

/*
 * Helper method: used for queries that has no JOINs, against an explicit db
 * handle (needed during upgrades, where getReadableDatabase() would recurse).
 */
private Cursor wrapHelper(SQLiteDatabase db, QueryHelper qh) {
    return db.query(qh.Distinct, qh.Table, qh.Columns, qh.Selection, qh.SelectionArgs, qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);
}

/*
 * Helper method: used for queries that has JOINs
 */
private Cursor wrapJoinHelper(SQLiteQueryBuilder qb, QueryHelper qh) {
    // Log.d(TAG, "qb: " + qb.buildQuery(_Columns, _Selection, _SelectionArgs, _GroupBy, _Having, _OrderBy, _Limit));
    return qb.query(getReadableDatabase(), qh.Columns, qh.Selection, qh.SelectionArgs, qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);
}

/*
 * Insert data to a table; returns the new row id (or -1 on error, per SQLiteDatabase.insert).
 */
public long insertRecord(String table, ContentValues values) {
    long l = getWritableDatabase().insert(table, null, values);
    return l;
}

/*
 * Insert data to a table, using an explicit db handle.
 */
public long insertRecord(SQLiteDatabase db, String table, ContentValues values) {
    long l = db.insert(table, null, values);
    return l;
}

/*
 * Update data in a table; returns the number of rows affected.
 * NOTE(review): strFilter is concatenated by callers, not parameterized — verify inputs are trusted.
 */
public int updateRecord(String table, String strFilter, ContentValues values) {
    int i = getWritableDatabase().update(table, values, strFilter, null);
    return i;
}

/*
 * Delete data in a table; true when at least one row was removed.
 */
public boolean deleteRecord(String table, String where, String[] args) {
    boolean b = getWritableDatabase().delete(table, where, args) > 0;
    return b;
}

/*
 * Get all arena quests
 */
public ArenaQuestCursor queryArenaQuests() {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARENA_QUESTS;
    qh.Selection = null;
    qh.SelectionArgs = null;
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArenaQuestCursor(wrapJoinHelper(builderArenaQuest(), qh));
}

/*
 * Get a specific arena quest
 */
public ArenaQuestCursor queryArenaQuest(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Distinct = false;
    qh.Table = S.TABLE_ARENA_QUESTS;
    qh.Columns = null;
    qh.Selection = "a." + S.COLUMN_ARENA_QUESTS_ID + " = ?";
    qh.SelectionArgs = new String[]{ String.valueOf(id) };
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArenaQuestCursor(wrapJoinHelper(builderArenaQuest(), qh));
}

/*
 * Get all arena quests based on location
 */
public ArenaQuestCursor queryArenaQuestLocation(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARENA_QUESTS;
    qh.Selection = "a." + S.COLUMN_ARENA_QUESTS_LOCATION_ID + " = ? ";
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArenaQuestCursor(wrapJoinHelper(builderArenaQuest(), qh));
}

/*
 * Helper method to query for ArenaQuest.
 * Builds: arena_quests AS a LEFT OUTER JOIN locations AS l ON a.location_id = l._id,
 * with location name aliased as "lname"-style keys via the projection map.
 */
private SQLiteQueryBuilder builderArenaQuest() {
    // SELECT a._id AS _id, a.name AS aname, a.location_id, a.reward,
    // a.num_participants, a.time_s, a.time_a, a.time_b, l.name AS lname
    // FROM arena_quests AS a
    // LEFT OUTER JOIN locations AS l on a.location_id = l._id;
    String a = "a";
    String l = "l";
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", a + "." + S.COLUMN_ARENA_QUESTS_ID + " AS " + "_id");
    projectionMap.put(S.COLUMN_ARENA_QUESTS_NAME, a + "." + S.COLUMN_ARENA_QUESTS_NAME + " AS " + a + S.COLUMN_ARENA_QUESTS_NAME);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_GOAL, a + "." + S.COLUMN_ARENA_QUESTS_GOAL);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_LOCATION_ID, a + "." + S.COLUMN_ARENA_QUESTS_LOCATION_ID);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_REWARD, a + "." + S.COLUMN_ARENA_QUESTS_REWARD);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_NUM_PARTICIPANTS, a + "." + S.COLUMN_ARENA_QUESTS_NUM_PARTICIPANTS);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_TIME_S, a + "." + S.COLUMN_ARENA_QUESTS_TIME_S);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_TIME_A, a + "." + S.COLUMN_ARENA_QUESTS_TIME_A);
    projectionMap.put(S.COLUMN_ARENA_QUESTS_TIME_B, a + "." + S.COLUMN_ARENA_QUESTS_TIME_B);
    projectionMap.put(l + S.COLUMN_LOCATIONS_NAME, l + "." + S.COLUMN_LOCATIONS_NAME + " AS " + l + S.COLUMN_LOCATIONS_NAME);
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_ARENA_QUESTS + " AS a" + " LEFT OUTER JOIN " + S.TABLE_LOCATIONS + " AS l " + " ON " + "a." + S.COLUMN_ARENA_QUESTS_LOCATION_ID + " = " + "l."
            + S.COLUMN_LOCATIONS_ID);
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all reward arena quests based on item, highest drop percentage first
 */
public ArenaRewardCursor queryArenaRewardItem(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARENA_REWARDS;
    qh.Selection = "ar." + S.COLUMN_ARENA_REWARDS_ITEM_ID + " = ? ";
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = "ar." + S.COLUMN_ARENA_REWARDS_PERCENTAGE + " DESC";
    qh.Limit = null;
    return new ArenaRewardCursor(wrapJoinHelper(builderArenaReward(), qh));
}

/*
 * Get all arena quest reward items based on arena quest
 */
public ArenaRewardCursor queryArenaRewardArena(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARENA_REWARDS;
    qh.Selection = "ar." + S.COLUMN_ARENA_REWARDS_ARENA_ID + " = ? ";
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArenaRewardCursor(wrapJoinHelper(builderArenaReward(), qh));
}

/*
 * Helper method to query for ArenaReward:
 * arena_rewards AS ar joined to items AS i (ar.item_id = i._id)
 * and arena_quests AS a (ar.arena_id = a._id).
 */
private SQLiteQueryBuilder builderArenaReward() {
    // SELECT ar._id AS _id, ar.arena_id, ar.item_id, ar.percentage, ar.stack_size,
    // a.name AS aname, i.name AS iname
    // FROM arena_rewards AS ar
    // LEFT OUTER JOIN arena_quests AS a ON ar.arena_id = a._id
    // LEFT OUTER JOIN items AS i ON ar.item_id = i._id;
    String ar = "ar";
    String i = "i";
    String a = "a";
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", ar + "." + S.COLUMN_ARENA_REWARDS_ID + " AS " + "_id");
    projectionMap.put(S.COLUMN_ARENA_REWARDS_ITEM_ID, ar + "." + S.COLUMN_ARENA_REWARDS_ITEM_ID);
    projectionMap.put(S.COLUMN_ARENA_REWARDS_ARENA_ID, ar + "." + S.COLUMN_ARENA_REWARDS_ARENA_ID);
    projectionMap.put(S.COLUMN_ARENA_REWARDS_PERCENTAGE, ar + "." + S.COLUMN_ARENA_REWARDS_PERCENTAGE);
    projectionMap.put(S.COLUMN_ARENA_REWARDS_STACK_SIZE, ar + "."
            + S.COLUMN_ARENA_REWARDS_STACK_SIZE);
    projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME);
    projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME);
    projectionMap.put(a + S.COLUMN_ARENA_QUESTS_NAME, a + "." + S.COLUMN_ARENA_QUESTS_NAME + " AS " + a + S.COLUMN_ARENA_QUESTS_NAME);
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_ARENA_REWARDS + " AS ar" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "ar." + S.COLUMN_ARENA_REWARDS_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_ARENA_QUESTS + " AS a " + " ON " + "ar." + S.COLUMN_ARENA_REWARDS_ARENA_ID + " = " + "a." + S.COLUMN_ARENA_QUESTS_ID);
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all armor
 */
public ArmorCursor queryArmor() {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARMOR;
    qh.Selection = null;
    qh.SelectionArgs = null;
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArmorCursor(wrapJoinHelper(builderArmor(), qh));
}

/*
 * Get a specific armor
 */
public ArmorCursor queryArmor(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARMOR;
    qh.Selection = "a." + S.COLUMN_ARMOR_ID + " = ?";
    qh.SelectionArgs = new String[]{ String.valueOf(id) };
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = "1";
    return new ArmorCursor(wrapJoinHelper(builderArmor(), qh));
}

/*
 * Get a specific armor based on hunter type; 'Both' always matches.
 */
public ArmorCursor queryArmorType(String type) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARMOR;
    qh.Selection = "a." + S.COLUMN_ARMOR_HUNTER_TYPE + " = ? " + " OR " + "a."
            + S.COLUMN_ARMOR_HUNTER_TYPE + " = 'Both'";
    qh.SelectionArgs = new String[]{type};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArmorCursor(wrapJoinHelper(builderArmor(), qh));
}

/*
 * Get a specific armor based on slot
 */
public ArmorCursor queryArmorSlot(String slot) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARMOR;
    qh.Selection = "a." + S.COLUMN_ARMOR_SLOT + " = ?";
    qh.SelectionArgs = new String[]{slot};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArmorCursor(wrapJoinHelper(builderArmor(), qh));
}

/*
 * Get a specific armor based on hunter type and slot ('Both' matches any type).
 */
public ArmorCursor queryArmorTypeSlot(String type, String slot) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_ARMOR;
    qh.Selection = "(a." + S.COLUMN_ARMOR_HUNTER_TYPE + " = ?" + " OR " + "a." + S.COLUMN_ARMOR_HUNTER_TYPE + " = 'Both') " + " AND " + "a." + S.COLUMN_ARMOR_SLOT + " = ?";
    qh.SelectionArgs = new String[]{type, slot};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ArmorCursor(wrapJoinHelper(builderArmor(), qh));
}

/*
 * Helper method to query for armor: armor AS a joined 1:1 to items AS i on _id.
 */
private SQLiteQueryBuilder builderArmor() {
    // SELECT a._id AS _id, a.slot, a.defense, a.max_defense, a.fire_res, a.thunder_res,
    // a.dragon_res, a.water_res, a.ice_res, a.gender, a.hunter_type, a.num_slots,
    // i.name, i.jpn_name, i.type, i.rarity, i.carry_capacity, i.buy, i.sell,
    // i.description, i.icon_name, i.armor_dupe_name_fix
    // FROM armor AS a LEFT OUTER JOIN items AS i ON a._id = i._id;
    String a = "a";
    String i = "i";
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", a + "." + S.COLUMN_ARMOR_ID + " AS " + "_id");
    projectionMap.put(S.COLUMN_ARMOR_SLOT, a + "." + S.COLUMN_ARMOR_SLOT);
    projectionMap.put(S.COLUMN_ARMOR_DEFENSE, a + "." + S.COLUMN_ARMOR_DEFENSE);
    projectionMap.put(S.COLUMN_ARMOR_MAX_DEFENSE, a + "."
            + S.COLUMN_ARMOR_MAX_DEFENSE);
    projectionMap.put(S.COLUMN_ARMOR_FIRE_RES, a + "." + S.COLUMN_ARMOR_FIRE_RES);
    projectionMap.put(S.COLUMN_ARMOR_THUNDER_RES, a + "." + S.COLUMN_ARMOR_THUNDER_RES);
    projectionMap.put(S.COLUMN_ARMOR_DRAGON_RES, a + "." + S.COLUMN_ARMOR_DRAGON_RES);
    projectionMap.put(S.COLUMN_ARMOR_WATER_RES, a + "." + S.COLUMN_ARMOR_WATER_RES);
    projectionMap.put(S.COLUMN_ARMOR_ICE_RES, a + "." + S.COLUMN_ARMOR_ICE_RES);
    projectionMap.put(S.COLUMN_ARMOR_GENDER, a + "." + S.COLUMN_ARMOR_GENDER);
    projectionMap.put(S.COLUMN_ARMOR_HUNTER_TYPE, a + "." + S.COLUMN_ARMOR_HUNTER_TYPE);
    projectionMap.put(S.COLUMN_ARMOR_NUM_SLOTS, a + "." + S.COLUMN_ARMOR_NUM_SLOTS);
    projectionMap.put(S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME);
    projectionMap.put(S.COLUMN_ITEMS_JPN_NAME, i + "." + S.COLUMN_ITEMS_JPN_NAME);
    projectionMap.put(S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE);
    projectionMap.put(S.COLUMN_ITEMS_SUB_TYPE, i + "." + S.COLUMN_ITEMS_SUB_TYPE);
    projectionMap.put(S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY);
    projectionMap.put(S.COLUMN_ITEMS_CARRY_CAPACITY, i + "." + S.COLUMN_ITEMS_CARRY_CAPACITY);
    projectionMap.put(S.COLUMN_ITEMS_BUY, i + "." + S.COLUMN_ITEMS_BUY);
    projectionMap.put(S.COLUMN_ITEMS_SELL, i + "." + S.COLUMN_ITEMS_SELL);
    projectionMap.put(S.COLUMN_ITEMS_DESCRIPTION, i + "." + S.COLUMN_ITEMS_DESCRIPTION);
    projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME);
    projectionMap.put(S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, i + "." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX);
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_ARMOR + " AS a" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "a." + S.COLUMN_ARMOR_ID + " = " + "i."
            + S.COLUMN_ITEMS_ID);
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all combinings
 */
public CombiningCursor queryCombinings() {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_COMBINING;
    qh.Selection = null;
    qh.SelectionArgs = null;
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new CombiningCursor(wrapJoinHelper(builderCursor(), qh));
}

/*
 * Get a specific combining
 */
public CombiningCursor queryCombining(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_COMBINING;
    qh.Selection = "c._id" + " = ?";
    qh.SelectionArgs = new String[]{ String.valueOf(id) };
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = "1";
    return new CombiningCursor(wrapJoinHelper(builderCursor(), qh));
}

/*
 * Get all combinations in which the item appears as result or as either ingredient.
 */
public CombiningCursor queryCombinationsOnItemID(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_COMBINING;
    qh.Selection = "crt._id" + " = ?" + " OR mat1._id" + " = ?"
            + " OR mat2._id" + " = ?";
    qh.SelectionArgs = new String[]{ String.valueOf(id), String.valueOf(id), String.valueOf(id)};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new CombiningCursor(wrapJoinHelper(builderCursor(), qh));
}

/*
 * Helper method to query for Combining:
 * combining AS c joined three times to items — crt (created item),
 * mat1 and mat2 (ingredients). Every items column is aliased with the
 * crt/mat1/mat2 prefix so the cursor can tell the three copies apart.
 */
private SQLiteQueryBuilder builderCursor() {
    // SELECT c._id AS _id, c.amount_made_min, c.amount_made_max, c.percentage,
    // crt.* AS crt_*, mat1.* AS mat1_*, mat2.* AS mat2_*
    // FROM combining AS c LEFT OUTER JOIN items AS crt ON c.created_item_id = crt._id
    // LEFT OUTER JOIN items AS mat1 ON c.item_1_id = mat1._id
    // LEFT OUTER JOIN items AS mat2 ON c.item_2_id = mat2._id;
    String comb = "c.";
    String[] items = new String[] {"crt", "mat1", "mat2"};
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", comb + S.COLUMN_ITEMS_ID + " AS " + "_id");
    projectionMap.put(S.COLUMN_COMBINING_AMOUNT_MADE_MIN, comb + S.COLUMN_COMBINING_AMOUNT_MADE_MIN);
    projectionMap.put(S.COLUMN_COMBINING_AMOUNT_MADE_MAX, comb +
            S.COLUMN_COMBINING_AMOUNT_MADE_MAX);
    projectionMap.put(S.COLUMN_COMBINING_PERCENTAGE, comb + S.COLUMN_COMBINING_PERCENTAGE);
    // Alias the full items column set once per role (crt/mat1/mat2).
    for (String i : items) {
        projectionMap.put(i + S.COLUMN_ITEMS_ID, i + "." + S.COLUMN_ITEMS_ID + " AS " + i + S.COLUMN_ITEMS_ID);
        projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME);
        projectionMap.put(i + S.COLUMN_ITEMS_JPN_NAME, i + "." + S.COLUMN_ITEMS_JPN_NAME + " AS " + i + S.COLUMN_ITEMS_JPN_NAME);
        projectionMap.put(i + S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE + " AS " + i + S.COLUMN_ITEMS_TYPE);
        projectionMap.put(i + S.COLUMN_ITEMS_SUB_TYPE, i + "." + S.COLUMN_ITEMS_SUB_TYPE + " AS " + i + S.COLUMN_ITEMS_SUB_TYPE);
        projectionMap.put(i + S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY + " AS " + i + S.COLUMN_ITEMS_RARITY);
        projectionMap.put(i + S.COLUMN_ITEMS_CARRY_CAPACITY, i + "." + S.COLUMN_ITEMS_CARRY_CAPACITY + " AS " + i + S.COLUMN_ITEMS_CARRY_CAPACITY);
        projectionMap.put(i + S.COLUMN_ITEMS_BUY, i + "." + S.COLUMN_ITEMS_BUY + " AS " + i + S.COLUMN_ITEMS_BUY);
        projectionMap.put(i + S.COLUMN_ITEMS_SELL, i + "." + S.COLUMN_ITEMS_SELL + " AS " + i + S.COLUMN_ITEMS_SELL);
        projectionMap.put(i + S.COLUMN_ITEMS_DESCRIPTION, i + "." + S.COLUMN_ITEMS_DESCRIPTION + " AS " + i + S.COLUMN_ITEMS_DESCRIPTION);
        projectionMap.put(i + S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME + " AS " + i + S.COLUMN_ITEMS_ICON_NAME);
        projectionMap.put(i + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, i + "." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX + " AS " + i + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX);
    }
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_COMBINING + " AS c" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS crt" + " ON " + "c." + S.COLUMN_COMBINING_CREATED_ITEM_ID + " = " + "crt." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS mat1" + " ON " + "c." + S.COLUMN_COMBINING_ITEM_1_ID + " = " + "mat1."
            + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS mat2" + " ON " + "c." + S.COLUMN_COMBINING_ITEM_2_ID + " = " + "mat2." + S.COLUMN_ITEMS_ID);
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all components for a created item
 */
public ComponentCursor queryComponentCreated(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_COMPONENTS;
    qh.Selection = "c." + S.COLUMN_COMPONENTS_CREATED_ITEM_ID + " = ? ";
    //s" AND " + "c." + S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID + " < " + S.SECTION_ARMOR;
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ComponentCursor(wrapJoinHelper(builderComponent(), qh));
}

/*
 * Get all components for a component item
 */
public ComponentCursor queryComponentComponent(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_COMPONENTS;
    qh.Selection = "c." + S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID + " = ? ";
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ComponentCursor(wrapJoinHelper(builderComponent(), qh));
}

/*
 * Get all components for a created item and type
 */
public ComponentCursor queryComponentCreatedType(long id, String type) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_COMPONENTS;
    qh.Selection = "c." + S.COLUMN_COMPONENTS_CREATED_ITEM_ID + " = ? " + " AND " + "c."
            + S.COLUMN_COMPONENTS_TYPE + " = ?";
    qh.SelectionArgs = new String[]{"" + id, type};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new ComponentCursor(wrapJoinHelper(builderComponent(), qh));
}

/*
 * Helper method to query for component:
 * components AS c joined twice to items — cr (created item) and co (component item).
 */
private SQLiteQueryBuilder builderComponent() {
    // SELECT c._id AS _id, c.created_item_id, c.component_item_id,
    // c.quantity, c.type, cr.name AS crname, co.name AS coname
    // FROM components AS c
    // LEFT OUTER JOIN items AS cr ON c.created_item_id = cr._id
    // LEFT OUTER JOIN items AS co ON c.component_item_id = co._id;
    String c = "c";
    String cr = "cr";
    String co = "co";
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", c + "." + S.COLUMN_COMPONENTS_ID + " AS " + "_id");
    projectionMap.put(S.COLUMN_COMPONENTS_CREATED_ITEM_ID, c + "." + S.COLUMN_COMPONENTS_CREATED_ITEM_ID);
    projectionMap.put(S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID, c + "." + S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID);
    projectionMap.put(S.COLUMN_COMPONENTS_QUANTITY, c + "." + S.COLUMN_COMPONENTS_QUANTITY);
    projectionMap.put(S.COLUMN_COMPONENTS_TYPE, c + "." + S.COLUMN_COMPONENTS_TYPE);
    projectionMap.put(cr + S.COLUMN_ITEMS_NAME, cr + "." + S.COLUMN_ITEMS_NAME + " AS " + cr + S.COLUMN_ITEMS_NAME);
    projectionMap.put(cr + S.COLUMN_ITEMS_TYPE, cr + "." + S.COLUMN_ITEMS_TYPE + " AS " + cr + S.COLUMN_ITEMS_TYPE);
    projectionMap.put(cr + S.COLUMN_ITEMS_SUB_TYPE, cr + "." + S.COLUMN_ITEMS_SUB_TYPE + " AS " + cr + S.COLUMN_ITEMS_SUB_TYPE);
    projectionMap.put(cr + S.COLUMN_ITEMS_RARITY, cr + "." + S.COLUMN_ITEMS_RARITY + " AS " + cr + S.COLUMN_ITEMS_RARITY);
    projectionMap.put(cr + S.COLUMN_ITEMS_ICON_NAME, cr + "." + S.COLUMN_ITEMS_ICON_NAME + " AS " + cr + S.COLUMN_ITEMS_ICON_NAME);
    projectionMap.put(co + S.COLUMN_ITEMS_NAME, co + "." + S.COLUMN_ITEMS_NAME + " AS " + co + S.COLUMN_ITEMS_NAME);
    projectionMap.put(co + S.COLUMN_ITEMS_TYPE, co + "."
            + S.COLUMN_ITEMS_TYPE + " AS " + co + S.COLUMN_ITEMS_TYPE);
    projectionMap.put(co + S.COLUMN_ITEMS_ICON_NAME, co + "." + S.COLUMN_ITEMS_ICON_NAME + " AS " + co + S.COLUMN_ITEMS_ICON_NAME);
    projectionMap.put(co + S.COLUMN_ITEMS_SUB_TYPE, co + "." + S.COLUMN_ITEMS_SUB_TYPE + " AS " + co + S.COLUMN_ITEMS_SUB_TYPE);
    projectionMap.put(co + S.COLUMN_ITEMS_RARITY, co + "." + S.COLUMN_ITEMS_RARITY + " AS " + co + S.COLUMN_ITEMS_RARITY);
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_COMPONENTS + " AS c" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS cr" + " ON " + "c." + S.COLUMN_COMPONENTS_CREATED_ITEM_ID + " = " + "cr." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS co " + " ON " + "c." + S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID + " = " + "co." + S.COLUMN_ITEMS_ID);
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all decorations
 */
public DecorationCursor queryDecorations() {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_DECORATIONS;
    qh.Selection = null;
    qh.SelectionArgs = null;
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new DecorationCursor(wrapJoinHelper(builderDecoration(), qh));
}

/*
 * Get a specific decoration
 */
public DecorationCursor queryDecoration(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_DECORATIONS;
    qh.Selection = "i._id" + " = ?";
    qh.SelectionArgs = new String[]{ String.valueOf(id) };
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = "1";
    return new DecorationCursor(wrapJoinHelper(builderDecoration(), qh));
}

/*
 * Helper method to query for decorations: decorations AS d joined to items AS i,
 * then twice through item_to_skill_tree/skill_trees (its1/s1 and its2/s2) to pull
 * up to two skills per decoration; its1 is restricted to point_value > 0 and its2
 * excludes the skill already matched by s1.
 */
private SQLiteQueryBuilder builderDecoration() {
    // SELECT i._id AS item_id, i.name, i.jpn_name, i.type, i.rarity, i.carry_capacity,
    // i.buy, i.sell, i.description, i.icon_name, i.armor_dupe_name_fix, d.num_slots,
    // s1._id AS skill_1_id, s1.name AS skill_1_name, its1.point_value AS skill_1_point,
    // s2._id AS skill_2_id, s2.name AS skill_2_name, its2.point_value AS skill_2_point
    // FROM decorations AS d LEFT OUTER JOIN items AS i ON d._id = i._id
    // LEFT OUTER JOIN item_to_skill_tree AS its1 ON i._id = its1.item_id and its1.point_value > 0
    // LEFT OUTER JOIN skill_trees AS s1 ON its1.skill_tree_id = s1._id
    // LEFT OUTER JOIN item_to_skill_tree AS its2 ON i._id = its2.item_id and s1._id != its2.skill_tree_id
    // LEFT OUTER JOIN skill_trees AS s2 ON its2.skill_tree_id = s2._id;
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", "i." + S.COLUMN_ITEMS_ID + " AS " + "_id");
    projectionMap.put("item_name", "i." + S.COLUMN_ITEMS_NAME + " AS " + "item_name");
    projectionMap.put(S.COLUMN_ITEMS_JPN_NAME, "i." + S.COLUMN_ITEMS_JPN_NAME);
    projectionMap.put(S.COLUMN_ITEMS_TYPE, "i." + S.COLUMN_ITEMS_TYPE);
    projectionMap.put(S.COLUMN_ITEMS_SUB_TYPE, "i." + S.COLUMN_ITEMS_SUB_TYPE);
    projectionMap.put(S.COLUMN_ITEMS_RARITY, "i." + S.COLUMN_ITEMS_RARITY);
    projectionMap.put(S.COLUMN_ITEMS_CARRY_CAPACITY, "i." + S.COLUMN_ITEMS_CARRY_CAPACITY);
    projectionMap.put(S.COLUMN_ITEMS_BUY, "i." + S.COLUMN_ITEMS_BUY);
    projectionMap.put(S.COLUMN_ITEMS_SELL, "i." + S.COLUMN_ITEMS_SELL);
    projectionMap.put(S.COLUMN_ITEMS_DESCRIPTION, "i." + S.COLUMN_ITEMS_DESCRIPTION);
    projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, "i." + S.COLUMN_ITEMS_ICON_NAME);
    projectionMap.put(S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, "i." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX);
    projectionMap.put(S.COLUMN_DECORATIONS_NUM_SLOTS, "d." + S.COLUMN_DECORATIONS_NUM_SLOTS);
    projectionMap.put("skill_1_id", "s1." + S.COLUMN_SKILL_TREES_ID + " AS " + "skill_1_id");
    projectionMap.put("skill_1_name", "s1." + S.COLUMN_SKILL_TREES_NAME + " AS " + "skill_1_name");
    projectionMap.put("skill_1_point_value", "its1." + S.COLUMN_ITEM_TO_SKILL_TREE_POINT_VALUE + " AS " + "skill_1_point_value");
    projectionMap.put("skill_2_id", "s2."
            + S.COLUMN_SKILL_TREES_ID + " AS " + "skill_2_id");
    projectionMap.put("skill_2_name", "s2." + S.COLUMN_SKILL_TREES_NAME + " AS " + "skill_2_name");
    projectionMap.put("skill_2_point_value", "its2." + S.COLUMN_ITEM_TO_SKILL_TREE_POINT_VALUE + " AS " + "skill_2_point_value");
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_DECORATIONS + " AS d" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "d." + S.COLUMN_DECORATIONS_ID + " = " + "i." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_ITEM_TO_SKILL_TREE + " AS its1 " + " ON " + "i." + S.COLUMN_ITEMS_ID + " = " + "its1." + S.COLUMN_ITEM_TO_SKILL_TREE_ITEM_ID + " AND " + "its1." + S.COLUMN_ITEM_TO_SKILL_TREE_POINT_VALUE + " > 0 " + " LEFT OUTER JOIN " + S.TABLE_SKILL_TREES + " AS s1" + " ON " + "its1." + S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID + " = " + "s1." + S.COLUMN_SKILL_TREES_ID + " LEFT OUTER JOIN " + S.TABLE_ITEM_TO_SKILL_TREE + " AS its2 " + " ON " + "i." + S.COLUMN_ITEMS_ID + " = " + "its2." + S.COLUMN_ITEM_TO_SKILL_TREE_ITEM_ID + " AND " + "s1." + S.COLUMN_SKILL_TREES_ID + " != " + "its2." + S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID + " LEFT OUTER JOIN " + S.TABLE_SKILL_TREES + " AS s2" + " ON " + "its2." + S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID + " = " + "s2." + S.COLUMN_SKILL_TREES_ID );
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all gathering locations based on item
 */
public GatheringCursor queryGatheringItem(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_GATHERING;
    qh.Selection = "g." + S.COLUMN_GATHERING_ITEM_ID + " = ? ";
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new GatheringCursor(wrapJoinHelper(builderGathering(), qh));
}

/*
 * Get all gathering items based on location
 */
public GatheringCursor queryGatheringLocation(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_GATHERING;
    qh.Selection = "g." + S.COLUMN_GATHERING_LOCATION_ID + " = ? ";
    qh.SelectionArgs = new String[]{"" + id};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new GatheringCursor(wrapJoinHelper(builderGathering(), qh));
}

/*
 * Get all gathering items based on location and rank
 */
public GatheringCursor queryGatheringLocationRank(long id, String rank) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_GATHERING;
    qh.Selection = "g." + S.COLUMN_GATHERING_LOCATION_ID + " = ? " + "AND " + "g." + S.COLUMN_GATHERING_RANK + " = ? ";
    qh.SelectionArgs = new String[]{"" + id, rank};
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new GatheringCursor(wrapJoinHelper(builderGathering(), qh));
}

/*
 * Helper method to query for Gathering:
 * gathering AS g joined to items AS i (g.item_id = i._id)
 * and locations AS l (g.location_id = l._id).
 */
private SQLiteQueryBuilder builderGathering() {
    // SELECT g._id AS _id, g.item_id, g.location_id, g.area, g.site, g.rank,
    // g.rate, i.name AS iname, l.name AS lname, l.map AS lmap
    // FROM gathering AS g
    // LEFT OUTER JOIN items AS i ON g.item_id = i._id
    // LEFT OUTER JOIN locations AS l on g.location_id = l._id;
    String g = "g";
    String i = "i";
    String l = "l";
    HashMap<String, String> projectionMap = new HashMap<String, String>();
    projectionMap.put("_id", g + "." + S.COLUMN_GATHERING_ID + " AS " + "_id");
    projectionMap.put(S.COLUMN_GATHERING_ITEM_ID, g + "." + S.COLUMN_GATHERING_ITEM_ID);
    projectionMap.put(S.COLUMN_GATHERING_LOCATION_ID, g + "."
            + S.COLUMN_GATHERING_LOCATION_ID);
    projectionMap.put(S.COLUMN_GATHERING_AREA, g + "." + S.COLUMN_GATHERING_AREA);
    projectionMap.put(S.COLUMN_GATHERING_SITE, g + "." + S.COLUMN_GATHERING_SITE);
    projectionMap.put(S.COLUMN_GATHERING_RANK, g + "." + S.COLUMN_GATHERING_RANK);
    projectionMap.put(S.COLUMN_GATHERING_RATE, g + "." + S.COLUMN_GATHERING_RATE);
    projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME);
    projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME);
    projectionMap.put(l + S.COLUMN_LOCATIONS_NAME, l + "." + S.COLUMN_LOCATIONS_NAME + " AS " + l + S.COLUMN_LOCATIONS_NAME);
    projectionMap.put(l + S.COLUMN_LOCATIONS_MAP, l + "." + S.COLUMN_LOCATIONS_MAP + " AS " + l + S.COLUMN_LOCATIONS_MAP);
    //Create new querybuilder
    SQLiteQueryBuilder QB = new SQLiteQueryBuilder();
    QB.setTables(S.TABLE_GATHERING + " AS g" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "g." + S.COLUMN_GATHERING_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_LOCATIONS + " AS l " + " ON " + "g." + S.COLUMN_GATHERING_LOCATION_ID + " = " + "l." + S.COLUMN_LOCATIONS_ID);
    QB.setProjectionMap(projectionMap);
    return QB;
}

/*
 * Get all hunting fleets
 */
public HuntingFleetCursor queryHuntingFleets() {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_HUNTING_FLEET;
    qh.Selection = null;
    qh.SelectionArgs = null;
    qh.GroupBy = null;
    qh.Having = null;
    qh.OrderBy = null;
    qh.Limit = null;
    return new HuntingFleetCursor(wrapJoinHelper(builderHuntingFleet(), qh));
}

/*
 * Get a specific hunting fleet
 * (continues past this chunk)
 */
public HuntingFleetCursor queryHuntingFleet(long id) {
    QueryHelper qh = new QueryHelper();
    qh.Columns = null;
    qh.Table = S.TABLE_HUNTING_FLEET;
    qh.Selection = "h."
+ S.COLUMN_HUNTING_FLEET_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = "1"; return new HuntingFleetCursor(wrapJoinHelper(builderHuntingFleet(), qh)); } /* * Get a specific hunting fleet based on type */ public HuntingFleetCursor queryHuntingFleetType(String type) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_HUNTING_FLEET; qh.Selection = "h." + S.COLUMN_HUNTING_FLEET_TYPE + " = ?"; qh.SelectionArgs = new String[]{ type }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new HuntingFleetCursor(wrapJoinHelper(builderHuntingFleet(), qh)); } /* * Get a specific hunting fleet based on location */ public HuntingFleetCursor queryHuntingFleetLocation(String location) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Selection = "h." + S.COLUMN_HUNTING_FLEET_LOCATION + " = ?"; qh.SelectionArgs = new String[]{ location }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new HuntingFleetCursor(wrapJoinHelper(builderHuntingFleet(), qh)); } /* * Helper method to query for hunting fleets */ private SQLiteQueryBuilder builderHuntingFleet() { // SELECT h._id AS _id, h.type AS htype, h.level, h.location, h.amount, h.percentage, h.rank, // h.item_id, i.name, i.jpn_name, i.type, i.rarity, i.carry_capacity, i.buy, i.sell, // i.description, i.icon_name, i.armor_dupe_name_fix // FROM hunting_fleet AS h LEFT OUTER JOIN items AS i ON h.item_id = i._id; String h = "h"; String i = "i"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", h + "." + S.COLUMN_HUNTING_FLEET_ID + " AS " + "_id"); projectionMap.put(h + S.COLUMN_HUNTING_FLEET_TYPE, h + "." + S.COLUMN_HUNTING_FLEET_TYPE + " AS " + h + S.COLUMN_HUNTING_FLEET_TYPE); projectionMap.put(S.COLUMN_HUNTING_FLEET_LEVEL, h + "." 
+ S.COLUMN_HUNTING_FLEET_LEVEL); projectionMap.put(S.COLUMN_HUNTING_FLEET_LOCATION, h + "." + S.COLUMN_HUNTING_FLEET_LOCATION); projectionMap.put(S.COLUMN_HUNTING_FLEET_AMOUNT, h + "." + S.COLUMN_HUNTING_FLEET_AMOUNT); projectionMap.put(S.COLUMN_HUNTING_FLEET_PERCENTAGE, h + "." + S.COLUMN_HUNTING_FLEET_PERCENTAGE); projectionMap.put(S.COLUMN_HUNTING_FLEET_RANK, h + "." + S.COLUMN_HUNTING_FLEET_RANK); projectionMap.put(S.COLUMN_HUNTING_FLEET_ITEM_ID, h + "." + S.COLUMN_HUNTING_FLEET_ITEM_ID); projectionMap.put(S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME); projectionMap.put(S.COLUMN_ITEMS_JPN_NAME, i + "." + S.COLUMN_ITEMS_JPN_NAME); projectionMap.put(i + S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE + " AS " + i + S.COLUMN_ITEMS_TYPE); projectionMap.put(S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY); projectionMap.put(S.COLUMN_ITEMS_CARRY_CAPACITY, i + "." + S.COLUMN_ITEMS_CARRY_CAPACITY); projectionMap.put(S.COLUMN_ITEMS_BUY, i + "." + S.COLUMN_ITEMS_BUY); projectionMap.put(S.COLUMN_ITEMS_SELL, i + "." + S.COLUMN_ITEMS_SELL); projectionMap.put(S.COLUMN_ITEMS_DESCRIPTION, i + "." + S.COLUMN_ITEMS_DESCRIPTION); projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME); projectionMap.put(S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, i + "." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_HUNTING_FLEET + " AS h" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "h." + S.COLUMN_HUNTING_FLEET_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID); QB.setProjectionMap(projectionMap); return QB; } /* * Get all hunting reward monsters based on item */ public HuntingRewardCursor queryHuntingRewardItem(long id) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_HUNTING_REWARDS; qh.Selection = "h." + S.COLUMN_HUNTING_REWARDS_ITEM_ID + " = ? 
"; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = "m." + S.COLUMN_MONSTERS_ID + " ASC, " + "h." + S.COLUMN_HUNTING_REWARDS_RANK + " DESC, " + "h." + S.COLUMN_HUNTING_REWARDS_ID + " ASC"; qh.Limit = null; return new HuntingRewardCursor(wrapJoinHelper(builderHuntingReward(), qh)); } /* * Get all hunting reward items based on monster */ public HuntingRewardCursor queryHuntingRewardMonster(long[] ids) { String[] string_list = new String[ids.length]; for(int i = 0; i < ids.length; i++){ string_list[i] = String.valueOf(ids[i]); } QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_HUNTING_REWARDS; qh.Selection = "h." + S.COLUMN_HUNTING_REWARDS_MONSTER_ID + " IN (" + makePlaceholders(ids.length) + ")"; qh.SelectionArgs = string_list; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new HuntingRewardCursor(wrapJoinHelper(builderHuntingReward(), qh)); } /* * Get all hunting reward items based on monster and rank */ public HuntingRewardCursor queryHuntingRewardMonsterRank(long[] ids, String rank) { String[] string_list = new String[ids.length + 1]; for(int i = 0; i < ids.length; i++){ string_list[i] = String.valueOf(ids[i]); } string_list[ids.length] = rank; QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_HUNTING_REWARDS; qh.Selection = "h." + S.COLUMN_HUNTING_REWARDS_MONSTER_ID + " IN (" + makePlaceholders(ids.length) + ")" + " AND " + "h." + S.COLUMN_HUNTING_REWARDS_RANK + " = ? 
"; qh.SelectionArgs = string_list; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new HuntingRewardCursor(wrapJoinHelper(builderHuntingReward(), qh)); } /* * Helper method to query for HuntingReward */ private SQLiteQueryBuilder builderHuntingReward() { // SELECT h._id AS _id, h.item_id, h.monster_id, // h.condition, h.rank, h.stack_size, h.percentage, // i.name AS iname, m.name AS mname // FROM hunting_rewards AS h // LEFT OUTER JOIN items AS i ON h.item_id = i._id // LEFT OUTER JOIN monsters AS m ON h.monster_id = m._id; String h = "h"; String i = "i"; String m = "m"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", h + "." + S.COLUMN_HUNTING_REWARDS_ID + " AS " + "_id"); projectionMap.put(S.COLUMN_HUNTING_REWARDS_ITEM_ID, h + "." + S.COLUMN_HUNTING_REWARDS_ITEM_ID); projectionMap.put(S.COLUMN_HUNTING_REWARDS_MONSTER_ID, h + "." + S.COLUMN_HUNTING_REWARDS_MONSTER_ID); projectionMap.put(S.COLUMN_HUNTING_REWARDS_CONDITION, h + "." + S.COLUMN_HUNTING_REWARDS_CONDITION); projectionMap.put(S.COLUMN_HUNTING_REWARDS_RANK, h + "." + S.COLUMN_HUNTING_REWARDS_RANK); projectionMap.put(S.COLUMN_HUNTING_REWARDS_STACK_SIZE, h + "." + S.COLUMN_HUNTING_REWARDS_STACK_SIZE); projectionMap.put(S.COLUMN_HUNTING_REWARDS_PERCENTAGE, h + "." + S.COLUMN_HUNTING_REWARDS_PERCENTAGE); projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME); projectionMap.put(i + S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME + " AS " + i + S.COLUMN_ITEMS_ICON_NAME); projectionMap.put(m + S.COLUMN_MONSTERS_NAME, m + "." + S.COLUMN_MONSTERS_NAME + " AS " + m + S.COLUMN_MONSTERS_NAME); projectionMap.put(S.COLUMN_MONSTERS_TRAIT, m + "." + S.COLUMN_MONSTERS_TRAIT); projectionMap.put(m + S.COLUMN_MONSTERS_FILE_LOCATION, m + "." 
+ S.COLUMN_MONSTERS_FILE_LOCATION + " AS " + m + S.COLUMN_MONSTERS_FILE_LOCATION); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_HUNTING_REWARDS + " AS h" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "h." + S.COLUMN_HUNTING_REWARDS_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_MONSTERS + " AS m " + " ON " + "h." + S.COLUMN_HUNTING_REWARDS_MONSTER_ID + " = " + "m." + S.COLUMN_MONSTERS_ID); QB.setProjectionMap(projectionMap); return QB; } /* * Get all items */ public ItemCursor queryItems() { // SELECT DISTINCT * FROM items ORDER BY _id QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_ITEMS; qh.Columns = null; qh.Selection = null; qh.SelectionArgs = null; qh.GroupBy = null; qh.Having = null; qh.OrderBy = S.COLUMN_ITEMS_ID; qh.Limit = null; return new ItemCursor(wrapHelper(qh)); } /* * Get a specific item */ public ItemCursor queryItem(long id) { // "SELECT DISTINCT * FROM items WHERE _id = id LIMIT 1" QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_ITEMS; qh.Columns = null; qh.Selection = S.COLUMN_ITEMS_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = "1"; return new ItemCursor(wrapHelper(qh)); } /* * Get items based on search text */ public ItemCursor queryItemSearch(String search) { // "SELECT * FROM items WHERE name LIKE %?%" QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_ITEMS; qh.Columns = null; qh.Selection = S.COLUMN_ITEMS_NAME + " LIKE ?"; qh.SelectionArgs = new String[]{ '%' + search + '%'}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new ItemCursor(wrapHelper(qh)); } /* * Get all skills based on item */ public ItemToSkillTreeCursor queryItemToSkillTreeItem(long id) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Selection = "itst." 
+ S.COLUMN_ITEM_TO_SKILL_TREE_ITEM_ID + " = ? "; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new ItemToSkillTreeCursor(wrapJoinHelper(builderItemToSkillTree(), qh)); } /* * Get all items based on skill tree */ public ItemToSkillTreeCursor queryItemToSkillTreeSkillTree(long id, String type) { String queryType = ""; if (type.equals("Decoration")) { queryType = "i." + S.COLUMN_ITEMS_TYPE; } else { queryType = "a." + S.COLUMN_ARMOR_SLOT; } QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_ITEM_TO_SKILL_TREE; qh.Selection = "itst." + S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID + " = ? " + " AND " + queryType + " = ? "; qh.SelectionArgs = new String[]{"" + id, type}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new ItemToSkillTreeCursor(wrapJoinHelper(builderItemToSkillTree(), qh)); } /* * Helper method to query for ItemToSkillTree */ private SQLiteQueryBuilder builderItemToSkillTree() { // SELECT itst._id AS _id, itst.item_id, itst.skill_tree_id, // itst.point_value, i.name AS iname, s.name AS sname // FROM item_to_skill_tree AS itst // LEFT OUTER JOIN items AS i ON itst.item_id = i._id // LEFT OUTER JOIN skill_trees AS s ON itst.skill_tree_id = s._id; // LEFT OUTER JOIN armor AS a ON i._id = a._id // LEFT OUTER JOIN decorations AS d ON i._id = d._id; String itst = "itst"; String i = "i"; String s = "s"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", itst + "." + S.COLUMN_ITEM_TO_SKILL_TREE_ID + " AS " + "_id"); projectionMap.put(S.COLUMN_ITEM_TO_SKILL_TREE_ITEM_ID, itst + "." + S.COLUMN_ITEM_TO_SKILL_TREE_ITEM_ID); projectionMap.put(S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID, itst + "." + S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID); projectionMap.put(S.COLUMN_ITEM_TO_SKILL_TREE_POINT_VALUE, itst + "." 
+ S.COLUMN_ITEM_TO_SKILL_TREE_POINT_VALUE); projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME); projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME); projectionMap.put(S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE); projectionMap.put(S.COLUMN_ITEMS_SUB_TYPE, i + "." + S.COLUMN_ITEMS_SUB_TYPE); projectionMap.put(S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY); projectionMap.put(s + S.COLUMN_SKILL_TREES_NAME, s + "." + S.COLUMN_SKILL_TREES_NAME + " AS " + s + S.COLUMN_SKILL_TREES_NAME); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_ITEM_TO_SKILL_TREE + " AS itst" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "itst." + S.COLUMN_ITEM_TO_SKILL_TREE_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_SKILL_TREES + " AS s " + " ON " + "itst." + S.COLUMN_ITEM_TO_SKILL_TREE_SKILL_TREE_ID + " = " + "s." + S.COLUMN_SKILL_TREES_ID + " LEFT OUTER JOIN " + S.TABLE_ARMOR + " AS a" + " ON " + "i." + S.COLUMN_ITEMS_ID + " = " + "a." + S.COLUMN_ARMOR_ID + " LEFT OUTER JOIN " + S.TABLE_DECORATIONS + " AS d" + " ON " + "i." + S.COLUMN_ITEMS_ID + " = " + "d." 
+ S.COLUMN_DECORATIONS_ID); QB.setProjectionMap(projectionMap); return QB; } /* * Get all locations */ public LocationCursor queryLocations() { // "SELECT DISTINCT * FROM locations GROUP BY name" QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_LOCATIONS; qh.Columns = null; qh.Selection = null; qh.SelectionArgs = null; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new LocationCursor(wrapHelper(qh)); } /* * Get a specific location */ public LocationCursor queryLocation(long id) { // "SELECT DISTINCT * FROM locations WHERE _id = id LIMIT 1" QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_LOCATIONS; qh.Columns = null; qh.Selection = S.COLUMN_LOCATIONS_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = "1"; return new LocationCursor(wrapHelper(qh)); } /* * Get all moga woods reward monsters based on item */ public MogaWoodsRewardCursor queryMogaWoodsRewardItem(long id) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_MOGA_WOODS_REWARDS; qh.Selection = "mwr." + S.COLUMN_MOGA_WOODS_REWARDS_ITEM_ID + " = ? "; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MogaWoodsRewardCursor(wrapJoinHelper(builderMogaWoodsReward(), qh)); } /* * Get all moga woods reward items based on monster */ public MogaWoodsRewardCursor queryMogaWoodsRewardMonster(long id) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Selection = "mwr." + S.COLUMN_MOGA_WOODS_REWARDS_MONSTER_ID + " = ? 
"; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MogaWoodsRewardCursor(wrapJoinHelper(builderMogaWoodsReward(), qh)); } /* * Get all moga woods reward items based on monster and time */ public MogaWoodsRewardCursor queryMogaWoodsRewardMonsterTime(long id, String time) { QueryHelper qh = new QueryHelper(); qh.Columns = null; qh.Table = S.TABLE_MOGA_WOODS_REWARDS; qh.Selection = "mwr." + S.COLUMN_MOGA_WOODS_REWARDS_MONSTER_ID + " = ? " + "AND " + "mwr." + S.COLUMN_MOGA_WOODS_REWARDS_TIME + " = ? "; qh.SelectionArgs = new String[]{"" + id, time}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MogaWoodsRewardCursor(wrapJoinHelper(builderMogaWoodsReward(), qh)); } /* * Helper method to query for MogaWoods */ private SQLiteQueryBuilder builderMogaWoodsReward() { // SELECT mwr._id AS _id, mwr.monster_id, mwr.item_id, // mwr.time, mwr.commodity_stars, mwr.kill_percentage, // mwr.capture_percentage, // i.name AS iname, m.name AS mname // FROM moga_woods_rewards AS mwr // LEFT OUTER JOIN monsters AS m ON mwr.monster_id = m._id // LEFT OUTER JOIN items AS i ON mwr.item_id = i._id; String mwr = "mwr"; String i = "i"; String m = "m"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", mwr + "." + S.COLUMN_MOGA_WOODS_REWARDS_ID + " AS " + "_id"); projectionMap.put(S.COLUMN_MOGA_WOODS_REWARDS_ITEM_ID, mwr + "." + S.COLUMN_MOGA_WOODS_REWARDS_ITEM_ID); projectionMap.put(S.COLUMN_MOGA_WOODS_REWARDS_MONSTER_ID, mwr + "." + S.COLUMN_MOGA_WOODS_REWARDS_MONSTER_ID); projectionMap.put(S.COLUMN_MOGA_WOODS_REWARDS_TIME, mwr + "." + S.COLUMN_MOGA_WOODS_REWARDS_TIME); projectionMap.put(S.COLUMN_MOGA_WOODS_REWARDS_COMMODITY_STARS, mwr + "." + S.COLUMN_MOGA_WOODS_REWARDS_COMMODITY_STARS); projectionMap.put(S.COLUMN_MOGA_WOODS_REWARDS_KILL_PERCENTAGE, mwr + "." 
+ S.COLUMN_MOGA_WOODS_REWARDS_KILL_PERCENTAGE); projectionMap.put(S.COLUMN_MOGA_WOODS_REWARDS_CAPTURE_PERCENTAGE, mwr + "." + S.COLUMN_MOGA_WOODS_REWARDS_CAPTURE_PERCENTAGE); projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME); projectionMap.put(i + S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME + " AS " + i + S.COLUMN_ITEMS_ICON_NAME); projectionMap.put(m + S.COLUMN_MONSTERS_NAME, m + "." + S.COLUMN_MONSTERS_NAME + " AS " + m + S.COLUMN_MONSTERS_NAME); projectionMap.put(m + S.COLUMN_MONSTERS_FILE_LOCATION, m + "." + S.COLUMN_MONSTERS_FILE_LOCATION + " AS " + m + S.COLUMN_MONSTERS_FILE_LOCATION); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_MOGA_WOODS_REWARDS + " AS mwr" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "mwr." + S.COLUMN_MOGA_WOODS_REWARDS_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_MONSTERS + " AS m " + " ON " + "mwr." + S.COLUMN_MOGA_WOODS_REWARDS_MONSTER_ID + " = " + "m." + S.COLUMN_MONSTERS_ID); QB.setProjectionMap(projectionMap); return QB; } /* * Get all monsters */ public MonsterCursor queryMonsters() { // "SELECT DISTINCT * FROM monsters GROUP BY name" QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_MONSTERS; qh.Columns = null; qh.Selection = S.COLUMN_MONSTERS_TRAIT + " = '' "; qh.SelectionArgs = null; qh.GroupBy = S.COLUMN_MONSTERS_SORT_NAME; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterCursor(wrapHelper(qh)); } /* * Get all small monsters */ public MonsterCursor querySmallMonsters() { // "SELECT DISTINCT * FROM monsters WHERE class = 'Minion' GROUP BY name" QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_MONSTERS; qh.Columns = null; qh.Selection = S.COLUMN_MONSTERS_CLASS + " = ?" 
+ " AND " + S.COLUMN_MONSTERS_TRAIT + " = '' "; qh.SelectionArgs = new String[] {"Minion"}; qh.GroupBy = S.COLUMN_MONSTERS_SORT_NAME; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterCursor(wrapHelper(qh)); } /* * Get all large monsters */ public MonsterCursor queryLargeMonsters() { // "SELECT DISTINCT * FROM monsters WHERE class = 'Boss' GROUP BY name" QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_MONSTERS; qh.Columns = null; qh.Selection = S.COLUMN_MONSTERS_CLASS + " = ?" + " AND " + S.COLUMN_MONSTERS_TRAIT + " = '' "; qh.SelectionArgs = new String[] {"Boss"}; qh.GroupBy = S.COLUMN_MONSTERS_SORT_NAME; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterCursor(wrapHelper(qh)); } /* * Get a specific monster */ public MonsterCursor queryMonster(long id) { // "SELECT DISTINCT * FROM monsters WHERE _id = id LIMIT 1" QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_MONSTERS; qh.Columns = null; qh.Selection = S.COLUMN_MONSTERS_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = "1"; return new MonsterCursor(wrapHelper(qh)); } /* * Get all traits from same monsters */ public MonsterCursor queryMonsterTrait(String name) { // "SELECT * FROM monsters WHERE _id = ? AND trait != ''" QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_MONSTERS; qh.Columns = null; qh.Selection = S.COLUMN_MONSTERS_NAME + " = ?" 
+ " AND " + S.COLUMN_MONSTERS_TRAIT + " != '' "; qh.SelectionArgs = new String[] {name}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterCursor(wrapHelper(qh)); } /** * Get a cursor with a query to grab all habitats of a monster * @param id id of the monster to query * @return A habitat cursor */ public MonsterHabitatCursor queryHabitatMonster(long id) { // Select * FROM monster_habitat WHERE monster_id = id QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_HABITAT; qh.Columns = null; qh.Selection = S.COLUMN_HABITAT_MONSTER_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterHabitatCursor(wrapJoinHelper(builderHabitat(qh.Distinct),qh)); } /** * Get a cursor with a query to grab all monsters by a location * @param id id of the location to query * @return A habitat cursor */ public MonsterHabitatCursor queryHabitatLocation(long id) { // Select * FROM monster_habitat WHERE location_id = id QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_HABITAT; qh.Columns = null; qh.Selection = S.COLUMN_HABITAT_LOCATION_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterHabitatCursor(wrapJoinHelper(builderHabitat(qh.Distinct),qh)); } /* * Helper method to query for Habitat/Monster/Location */ private SQLiteQueryBuilder builderHabitat(boolean Distinct) { String h = "h"; String m = "m"; String l = "l"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", h + "." + S.COLUMN_HABITAT_ID + " AS " + "_id"); projectionMap.put("start_area", h + "." + S.COLUMN_HABITAT_START + " AS " + "start_area"); projectionMap.put("move_area", h + "." + S.COLUMN_HABITAT_AREAS + " AS " + "move_area"); projectionMap.put("rest_area", h + "." 
+ S.COLUMN_HABITAT_REST + " AS " + "rest_area"); projectionMap.put(l + S.COLUMN_LOCATIONS_ID, l + "." + S.COLUMN_LOCATIONS_ID + " AS " + l + S.COLUMN_LOCATIONS_ID ); projectionMap.put(l + S.COLUMN_LOCATIONS_NAME, l + "." + S.COLUMN_LOCATIONS_NAME + " AS " + l + S.COLUMN_LOCATIONS_NAME ); projectionMap.put(l + S.COLUMN_LOCATIONS_MAP, l + "." + S.COLUMN_LOCATIONS_MAP + " AS " + l + S.COLUMN_LOCATIONS_MAP ); projectionMap.put(m + S.COLUMN_MONSTERS_ID, m+ "." + S.COLUMN_MONSTERS_ID + " AS " + m + S.COLUMN_MONSTERS_ID); projectionMap.put(m + S.COLUMN_MONSTERS_NAME, m + "." + S.COLUMN_MONSTERS_NAME + " AS " + m + S.COLUMN_MONSTERS_NAME); projectionMap.put(m + S.COLUMN_MONSTERS_CLASS, m + "." + S.COLUMN_MONSTERS_CLASS + " AS " + m + S.COLUMN_MONSTERS_CLASS ); projectionMap.put(m + S.COLUMN_MONSTERS_FILE_LOCATION, m + "." + S.COLUMN_MONSTERS_FILE_LOCATION + " AS " + m + S.COLUMN_MONSTERS_FILE_LOCATION ); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_HABITAT + " AS h" + " LEFT OUTER JOIN " + S.TABLE_MONSTERS + " AS m" + " ON " + "h." + S.COLUMN_HABITAT_MONSTER_ID + " = " + "m." + S.COLUMN_MONSTERS_ID + " LEFT OUTER JOIN " + S.TABLE_LOCATIONS + " AS l " + " ON " + "h." + S.COLUMN_HABITAT_LOCATION_ID + " = " + "l." 
+ S.COLUMN_LOCATIONS_ID); QB.setDistinct(Distinct); QB.setProjectionMap(projectionMap); return QB; } /* * Get all monster status info for a monster * @param id The monster id */ public MonsterStatusCursor queryMonsterStatus(long id) { // "SELECT * FROM monster_status WHERE monster_id = id" QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_MONSTER_STATUS; qh.Columns = null; qh.Selection = S.COLUMN_MONSTER_STATUS_MONSTER_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterStatusCursor(wrapHelper(qh)); } /* * Get all monster damage for a monster */ public MonsterDamageCursor queryMonsterDamage(long id) { // "SELECT * FROM monster_damage WHERE monster_id = id" QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_MONSTER_DAMAGE; qh.Columns = null; qh.Selection = S.COLUMN_MONSTER_DAMAGE_MONSTER_ID + " = ?"; qh.SelectionArgs = new String[]{ String.valueOf(id) }; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterDamageCursor(wrapHelper(qh)); } /* * Get all arena quests based on monster */ public MonsterToArenaCursor queryMonsterToArenaMonster(long id) { QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_MONSTER_TO_ARENA; qh.Columns = null; qh.Selection = "mta." + S.COLUMN_MONSTER_TO_ARENA_MONSTER_ID + " = ? "; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = "a." + S.COLUMN_ARENA_QUESTS_NAME; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterToArenaCursor(wrapJoinHelper(builderMonsterToArena(qh.Distinct), qh)); } /* * Get all monsters based on arena quest */ public MonsterToArenaCursor queryMonsterToArenaArena(long id) { QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_MONSTER_TO_ARENA; qh.Columns = null; qh.Selection = "mta." + S.COLUMN_MONSTER_TO_ARENA_ARENA_ID + " = ? 
"; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterToArenaCursor(wrapJoinHelper(builderMonsterToArena(qh.Distinct), qh)); } /* * Helper method to query for MonsterToArena */ private SQLiteQueryBuilder builderMonsterToArena(boolean Distinct) { // SELECT mta._id AS _id, mta.monster_id, mta.arena_id, // m.name AS mname, a.name AS aname, // FROM monster_to_arena AS mta // LEFT OUTER JOIN monsters AS m ON mta.monster_id = m._id // LEFT OUTER JOIN arena_quests AS a ON mta.arena_id = a._id; String mta = "mta"; String m = "m"; String a = "a"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", mta + "." + S.COLUMN_MONSTER_TO_ARENA_ID + " AS " + "_id"); projectionMap.put(S.COLUMN_MONSTER_TO_ARENA_ID, mta + "." + S.COLUMN_MONSTER_TO_ARENA_ID); projectionMap.put(S.COLUMN_MONSTER_TO_ARENA_MONSTER_ID, mta + "." + S.COLUMN_MONSTER_TO_ARENA_MONSTER_ID); projectionMap.put(S.COLUMN_MONSTER_TO_ARENA_ARENA_ID, mta + "." + S.COLUMN_MONSTER_TO_ARENA_ARENA_ID); projectionMap.put(m + S.COLUMN_MONSTERS_NAME, m + "." + S.COLUMN_MONSTERS_NAME + " AS " + m + S.COLUMN_MONSTERS_NAME); projectionMap.put(S.COLUMN_MONSTERS_TRAIT, m + "." + S.COLUMN_MONSTERS_TRAIT); projectionMap.put(S.COLUMN_MONSTERS_FILE_LOCATION, m + "." + S.COLUMN_MONSTERS_FILE_LOCATION); projectionMap.put(a + S.COLUMN_ARENA_QUESTS_NAME, a + "." + S.COLUMN_ARENA_QUESTS_NAME + " AS " + a + S.COLUMN_ARENA_QUESTS_NAME); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_MONSTER_TO_ARENA + " AS mta" + " LEFT OUTER JOIN " + S.TABLE_MONSTERS + " AS m" + " ON " + "mta." + S.COLUMN_MONSTER_TO_ARENA_MONSTER_ID + " = " + "m." + S.COLUMN_MONSTERS_ID + " LEFT OUTER JOIN " + S.TABLE_ARENA_QUESTS + " AS a " + " ON " + "mta." + S.COLUMN_MONSTER_TO_ARENA_ARENA_ID + " = " + "a." 
+ S.COLUMN_ARENA_QUESTS_ID); QB.setDistinct(Distinct); QB.setProjectionMap(projectionMap); return QB; } /* * Get all quests based on monster */ public MonsterToQuestCursor queryMonsterToQuestMonster(long id) { QueryHelper qh = new QueryHelper(); qh.Distinct = true; qh.Table = S.TABLE_MONSTER_TO_QUEST; qh.Columns = null; qh.Selection = "mtq." + S.COLUMN_MONSTER_TO_QUEST_MONSTER_ID + " = ? "; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = "q." + S.COLUMN_QUESTS_NAME; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterToQuestCursor(wrapJoinHelper(builderMonsterToQuest(qh.Distinct), qh)); } /* * Get all monsters based on quest */ public MonsterToQuestCursor queryMonsterToQuestQuest(long id) { QueryHelper qh = new QueryHelper(); qh.Distinct = false; qh.Table = S.TABLE_MONSTER_TO_QUEST; qh.Columns = null; qh.Selection = "mtq." + S.COLUMN_MONSTER_TO_QUEST_QUEST_ID + " = ? "; qh.SelectionArgs = new String[]{"" + id}; qh.GroupBy = null; qh.Having = null; qh.OrderBy = null; qh.Limit = null; return new MonsterToQuestCursor(wrapJoinHelper(builderMonsterToQuest(qh.Distinct), qh)); } /* * Helper method to query for MonsterToQuest */ private SQLiteQueryBuilder builderMonsterToQuest(boolean Distinct) { // SELECT mtq._id AS _id, mtq.monster_id, mtq.quest_id, // mtq.unstable, m.name AS mname, q.name AS qname, // q.hub, q.stars // FROM monster_to_quest AS mtq // LEFT OUTER JOIN monsters AS m ON mtq.monster_id = m._id // LEFT OUTER JOIN quests AS q ON mtq.quest_id = q._id; String mtq = "mtq"; String m = "m"; String q = "q"; HashMap<String, String> projectionMap = new HashMap<String, String>(); projectionMap.put("_id", mtq + "." + S.COLUMN_MONSTER_TO_QUEST_ID + " AS " + "_id"); projectionMap.put(S.COLUMN_MONSTER_TO_QUEST_MONSTER_ID, mtq + "." + S.COLUMN_MONSTER_TO_QUEST_MONSTER_ID); projectionMap.put(S.COLUMN_MONSTER_TO_QUEST_QUEST_ID, mtq + "." + S.COLUMN_MONSTER_TO_QUEST_QUEST_ID); projectionMap.put(S.COLUMN_MONSTER_TO_QUEST_UNSTABLE, mtq + "." 
+ S.COLUMN_MONSTER_TO_QUEST_UNSTABLE);
        projectionMap.put(m + S.COLUMN_MONSTERS_NAME, m + "." + S.COLUMN_MONSTERS_NAME + " AS " + m + S.COLUMN_MONSTERS_NAME);
        projectionMap.put(S.COLUMN_MONSTERS_TRAIT, m + "." + S.COLUMN_MONSTERS_TRAIT);
        projectionMap.put(S.COLUMN_MONSTERS_FILE_LOCATION, m + "." + S.COLUMN_MONSTERS_FILE_LOCATION);
        projectionMap.put(q + S.COLUMN_QUESTS_NAME, q + "." + S.COLUMN_QUESTS_NAME + " AS " + q + S.COLUMN_QUESTS_NAME);
        projectionMap.put(S.COLUMN_QUESTS_HUB, q + "." + S.COLUMN_QUESTS_HUB);
        projectionMap.put(S.COLUMN_QUESTS_STARS, q + "." + S.COLUMN_QUESTS_STARS);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_MONSTER_TO_QUEST + " AS mtq" + " LEFT OUTER JOIN " + S.TABLE_MONSTERS +
                " AS m" + " ON " + "mtq." + S.COLUMN_MONSTER_TO_QUEST_MONSTER_ID + " = " + "m." +
                S.COLUMN_MONSTERS_ID + " LEFT OUTER JOIN " + S.TABLE_QUESTS + " AS q " + " ON " +
                "mtq." + S.COLUMN_MONSTER_TO_QUEST_QUEST_ID + " = " + "q." + S.COLUMN_QUESTS_ID);

        QB.setDistinct(Distinct);
        QB.setProjectionMap(projectionMap);

        return QB;
    }

    /*
     * Get all quests
     */
    public QuestCursor queryQuests() {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_QUESTS;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new QuestCursor(wrapJoinHelper(builderQuest(), qh));
    }

    /*
     * Get a specific quest (primary key lookup, limited to one row)
     */
    public QuestCursor queryQuest(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_QUESTS;
        qh.Selection = "q." + S.COLUMN_QUESTS_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = "1";
        return new QuestCursor(wrapJoinHelper(builderQuest(), qh));
    }

    /*
     * Get a specific quest based on hub
     */
    public QuestCursor queryQuestHub(String hub) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_QUESTS;
        qh.Selection = "q." + S.COLUMN_QUESTS_HUB + " = ?";
        qh.SelectionArgs = new String[]{ hub };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new QuestCursor(wrapJoinHelper(builderQuest(), qh));
    }

    /*
     * Get a specific quest based on hub and stars
     */
    public QuestCursor queryQuestHubStar(String hub, String stars) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_QUESTS;
        qh.Selection = "q." + S.COLUMN_QUESTS_HUB + " = ?" + " AND " + "q." + S.COLUMN_QUESTS_STARS + " = ?";
        qh.SelectionArgs = new String[]{ hub, stars };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new QuestCursor(wrapJoinHelper(builderQuest(), qh));
    }

    /*
     * Helper method to query for quests.
     * Joins quests with locations so the cursor also carries the location
     * name and map file of each quest.
     */
    private SQLiteQueryBuilder builderQuest() {
        // SELECT q._id AS _id, q.name AS qname, q.goal, q.hub, q.type, q.stars, q.location_id, q.time_limit,
        // q.fee, q.reward, q.hrp, l.name AS lname, l.map
        // FROM quests AS q LEFT OUTER JOIN locations AS l ON q.location_id = l._id;

        String q = "q";
        String l = "l";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", q + "." + S.COLUMN_QUESTS_ID + " AS " + "_id");
        projectionMap.put(q + S.COLUMN_QUESTS_NAME, q + "." + S.COLUMN_QUESTS_NAME + " AS " + q + S.COLUMN_QUESTS_NAME);
        projectionMap.put(S.COLUMN_QUESTS_GOAL, q + "." + S.COLUMN_QUESTS_GOAL);
        projectionMap.put(S.COLUMN_QUESTS_HUB, q + "." + S.COLUMN_QUESTS_HUB);
        projectionMap.put(S.COLUMN_QUESTS_TYPE, q + "." + S.COLUMN_QUESTS_TYPE);
        projectionMap.put(S.COLUMN_QUESTS_STARS, q + "." + S.COLUMN_QUESTS_STARS);
        projectionMap.put(S.COLUMN_QUESTS_LOCATION_ID, q + "." + S.COLUMN_QUESTS_LOCATION_ID);
        //projectionMap.put(S.COLUMN_QUESTS_LOCATION_TIME, q + "." + S.COLUMN_QUESTS_LOCATION_TIME);
        projectionMap.put(S.COLUMN_QUESTS_TIME_LIMIT, q + "." + S.COLUMN_QUESTS_TIME_LIMIT);
        projectionMap.put(S.COLUMN_QUESTS_FEE, q + "." + S.COLUMN_QUESTS_FEE);
        projectionMap.put(S.COLUMN_QUESTS_REWARD, q + "."
+ S.COLUMN_QUESTS_REWARD);
        projectionMap.put(S.COLUMN_QUESTS_HRP, q + "." + S.COLUMN_QUESTS_HRP);
        projectionMap.put(S.COLUMN_QUESTS_SUB_GOAL, q + "." + S.COLUMN_QUESTS_SUB_GOAL);
        projectionMap.put(S.COLUMN_QUESTS_SUB_REWARD, q + "." + S.COLUMN_QUESTS_SUB_REWARD);
        projectionMap.put(S.COLUMN_QUESTS_SUB_HRP, q + "." + S.COLUMN_QUESTS_SUB_HRP);
        projectionMap.put(l + S.COLUMN_LOCATIONS_NAME, l + "." + S.COLUMN_LOCATIONS_NAME + " AS " + l + S.COLUMN_LOCATIONS_NAME);
        projectionMap.put(S.COLUMN_LOCATIONS_MAP, l + "." + S.COLUMN_LOCATIONS_MAP);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_QUESTS + " AS q" + " LEFT OUTER JOIN " + S.TABLE_LOCATIONS + " AS l" +
                " ON " + "q." + S.COLUMN_QUESTS_LOCATION_ID + " = " + "l." + S.COLUMN_LOCATIONS_ID);

        QB.setProjectionMap(projectionMap);

        return QB;
    }

    /*
     * Get all quest reward quests based on item; highest drop percentage first.
     */
    public QuestRewardCursor queryQuestRewardItem(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_QUEST_REWARDS;
        qh.Selection = "qr." + S.COLUMN_QUEST_REWARDS_ITEM_ID + " = ? ";
        qh.SelectionArgs = new String[]{"" + id};
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = "qr." + S.COLUMN_QUEST_REWARDS_PERCENTAGE + " DESC";
        qh.Limit = null;
        return new QuestRewardCursor(wrapJoinHelper(builderQuestReward(), qh));
    }

    /*
     * Get all quest reward items based on quest
     */
    public QuestRewardCursor queryQuestRewardQuest(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_QUEST_REWARDS;
        qh.Selection = "qr." + S.COLUMN_QUEST_REWARDS_QUEST_ID + " = ? ";
        qh.SelectionArgs = new String[]{"" + id};
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new QuestRewardCursor(wrapJoinHelper(builderQuestReward(), qh));
    }

    /*
     * Helper method to query for QuestReward
     */
    private SQLiteQueryBuilder builderQuestReward() {
        // SELECT qr._id AS _id, qr.quest_id, qr.item_id,
        // qr.reward_slot, qr.percentage, qr.stack_size,
        // q.name AS qname, q.hub, q.stars, i.name AS iname
        // FROM quest_rewards AS qr
        // LEFT OUTER JOIN quests AS q ON qr.quest_id = q._id
        // LEFT OUTER JOIN items AS i ON qr.item_id = i._id;

        String qr = "qr";
        String i = "i";
        String q = "q";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", qr + "." + S.COLUMN_QUEST_REWARDS_ID + " AS " + "_id");
        projectionMap.put(S.COLUMN_QUEST_REWARDS_ITEM_ID, qr + "." + S.COLUMN_QUEST_REWARDS_ITEM_ID);
        projectionMap.put(S.COLUMN_QUEST_REWARDS_QUEST_ID, qr + "." + S.COLUMN_QUEST_REWARDS_QUEST_ID);
        projectionMap.put(S.COLUMN_QUEST_REWARDS_REWARD_SLOT, qr + "." + S.COLUMN_QUEST_REWARDS_REWARD_SLOT);
        projectionMap.put(S.COLUMN_QUEST_REWARDS_PERCENTAGE, qr + "." + S.COLUMN_QUEST_REWARDS_PERCENTAGE);
        projectionMap.put(S.COLUMN_QUEST_REWARDS_STACK_SIZE, qr + "." + S.COLUMN_QUEST_REWARDS_STACK_SIZE);
        projectionMap.put(i + S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME + " AS " + i + S.COLUMN_ITEMS_NAME);
        projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME);
        projectionMap.put(q + S.COLUMN_QUESTS_NAME, q + "." + S.COLUMN_QUESTS_NAME + " AS " + q + S.COLUMN_QUESTS_NAME);
        projectionMap.put(S.COLUMN_QUESTS_HUB, q + "." + S.COLUMN_QUESTS_HUB);
        projectionMap.put(S.COLUMN_QUESTS_STARS, q + "." + S.COLUMN_QUESTS_STARS);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_QUEST_REWARDS + " AS qr" + " LEFT OUTER JOIN " + S.TABLE_ITEMS +
                " AS i" + " ON " + "qr." + S.COLUMN_QUEST_REWARDS_ITEM_ID + " = " + "i." +
                S.COLUMN_ITEMS_ID + " LEFT OUTER JOIN " + S.TABLE_QUESTS + " AS q " + " ON " +
                "qr." + S.COLUMN_QUEST_REWARDS_QUEST_ID + " = " + "q." + S.COLUMN_QUESTS_ID);

        QB.setProjectionMap(projectionMap);

        return QB;
    }

    // NOTE(review): dead code kept from an older, field-based QueryHelper design.
//	public SkillCursor querySkill(long id) {
//		// "SELECT * FROM skills WHERE skill_id = id"
//		_Distinct = false;
//		_Table = S.TABLE_SKILLS;
//		_Columns = null;
//		_Selection = S.COLUMN_SKILLS_ID + " = ?";
//		_SelectionArgs = new String[]{ String.valueOf(id) };
//		_GroupBy = null;
//		_Having = null;
//		_OrderBy = null;
//		_Limit = null;
//		return new SkillCursor(wrapHelper());

    /*
     * Get all skills for a skill tree
     */
    public SkillCursor querySkillFromTree(long id) {
        // "SELECT * FROM skills WHERE skill_tree_id = id"
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_SKILLS;
        qh.Columns = null;
        qh.Selection = S.COLUMN_SKILLS_SKILL_TREE_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new SkillCursor(wrapHelper(qh));
    }

    /*
     * Get all skill trees
     */
    public SkillTreeCursor querySkillTrees() {
        // "SELECT DISTINCT * FROM skill_trees GROUP BY name"
        QueryHelper qh = new QueryHelper();
        qh.Distinct = true;
        qh.Table = S.TABLE_SKILL_TREES;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = S.COLUMN_SKILL_TREES_NAME;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new SkillTreeCursor(wrapHelper(qh));
    }

    /*
     * Get a specific skill tree
     */
    public SkillTreeCursor querySkillTree(long id) {
        // "SELECT DISTINCT * FROM skill_trees WHERE _id = id LIMIT 1"
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_SKILL_TREES;
        qh.Columns = null;
        qh.Selection = S.COLUMN_SKILL_TREES_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = "1";
        return new SkillTreeCursor(wrapHelper(qh));
    }

    /*
     * Get all weapon
     */
    public WeaponCursor
queryWeapon() {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_WEAPONS;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WeaponCursor(wrapJoinHelper(builderWeapon(), qh));
    }

    /*
     * Get a specific weapon (primary key lookup, limited to one row)
     */
    public WeaponCursor queryWeapon(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_WEAPONS;
        qh.Selection = "w." + S.COLUMN_WEAPONS_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = "1";
        return new WeaponCursor(wrapJoinHelper(builderWeapon(), qh));
    }

    /*
     * Get multiple specific weapon.
     * Builds an IN (...) clause with one '?' placeholder per id.
     */
    public WeaponCursor queryWeapons(long[] ids) {
        // Convert the ids to strings for use as selection arguments.
        String[] string_list = new String[ids.length];
        for(int i = 0; i < ids.length; i++){
            string_list[i] = String.valueOf(ids[i]);
        }

        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_WEAPONS;
        qh.Selection = "w." + S.COLUMN_WEAPONS_ID + " IN (" + makePlaceholders(ids.length) + ")";
        qh.SelectionArgs = string_list;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WeaponCursor(wrapJoinHelper(builderWeapon(), qh));
    }

    /*
     * Get a specific weapon based on weapon type
     */
    public WeaponCursor queryWeaponType(String type) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Table = S.TABLE_WEAPONS;
        qh.Selection = "w." + S.COLUMN_WEAPONS_WTYPE + " = ? ";
        qh.SelectionArgs = new String[]{type};
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WeaponCursor(wrapJoinHelper(builderWeapon(), qh));
    }

    /*
     * Helper method to query for weapon.
     * Weapons share their primary key with items (w._id = i._id), so the join
     * pulls in the item-level fields (name, rarity, prices, icon, ...).
     */
    private SQLiteQueryBuilder builderWeapon() {
        // SELECT w._id AS _id, w.wtype, w.creation_cost, w.upgrade_cost, w.attack, w.max_attack,
        // w.elemental_attack, w.awakened_elemental_attack, w.defense, w.sharpness, w.affinity,
        // w.horn_notes, w.shelling_type, w.charge_levels, w.allowed_coatings, w.recoil, w.reload_speed,
        // w.rapid_fire, w.normal_shots, w.status_shots, w.elemental_shots, w.tool_shots, w.num_slots,
        // w.sharpness_file,
        // i.name, i.jpn_name, i.type, i.rarity, i.carry_capacity, i.buy, i.sell, i.description,
        // i.icon_name, i.armor_dupe_name_fix
        // FROM weapons AS w LEFT OUTER JOIN items AS i ON w._id = i._id;

        String w = "w";
        String i = "i";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", w + "." + S.COLUMN_WEAPONS_ID + " AS " + "_id");
        projectionMap.put(S.COLUMN_WEAPONS_WTYPE, w + "." + S.COLUMN_WEAPONS_WTYPE);
        projectionMap.put(S.COLUMN_WEAPONS_CREATION_COST, w + "." + S.COLUMN_WEAPONS_CREATION_COST);
        projectionMap.put(S.COLUMN_WEAPONS_UPGRADE_COST, w + "." + S.COLUMN_WEAPONS_UPGRADE_COST);
        projectionMap.put(S.COLUMN_WEAPONS_ATTACK, w + "." + S.COLUMN_WEAPONS_ATTACK);
        projectionMap.put(S.COLUMN_WEAPONS_MAX_ATTACK, w + "." + S.COLUMN_WEAPONS_MAX_ATTACK);
        projectionMap.put(S.COLUMN_WEAPONS_ELEMENT, w + "." + S.COLUMN_WEAPONS_ELEMENT);
        projectionMap.put(S.COLUMN_WEAPONS_AWAKEN, w + "." + S.COLUMN_WEAPONS_AWAKEN);
        projectionMap.put(S.COLUMN_WEAPONS_ELEMENT_2, w + "." + S.COLUMN_WEAPONS_ELEMENT_2);
        projectionMap.put(S.COLUMN_WEAPONS_AWAKEN_ATTACK, w + "." + S.COLUMN_WEAPONS_AWAKEN_ATTACK);
        projectionMap.put(S.COLUMN_WEAPONS_ELEMENT_ATTACK, w + "." + S.COLUMN_WEAPONS_ELEMENT_ATTACK);
        projectionMap.put(S.COLUMN_WEAPONS_ELEMENT_2_ATTACK, w + "." + S.COLUMN_WEAPONS_ELEMENT_2_ATTACK);
        projectionMap.put(S.COLUMN_WEAPONS_DEFENSE, w + "." + S.COLUMN_WEAPONS_DEFENSE);
        projectionMap.put(S.COLUMN_WEAPONS_SHARPNESS, w + "." + S.COLUMN_WEAPONS_SHARPNESS);
        projectionMap.put(S.COLUMN_WEAPONS_AFFINITY, w + "." + S.COLUMN_WEAPONS_AFFINITY);
        projectionMap.put(S.COLUMN_WEAPONS_HORN_NOTES, w + "." + S.COLUMN_WEAPONS_HORN_NOTES);
        projectionMap.put(S.COLUMN_WEAPONS_SHELLING_TYPE, w + "." + S.COLUMN_WEAPONS_SHELLING_TYPE);
        projectionMap.put(S.COLUMN_WEAPONS_PHIAL, w + "." + S.COLUMN_WEAPONS_PHIAL);
        projectionMap.put(S.COLUMN_WEAPONS_CHARGES, w + "." + S.COLUMN_WEAPONS_CHARGES);
        projectionMap.put(S.COLUMN_WEAPONS_COATINGS, w + "." + S.COLUMN_WEAPONS_COATINGS);
        projectionMap.put(S.COLUMN_WEAPONS_RECOIL, w + "." + S.COLUMN_WEAPONS_RECOIL);
        projectionMap.put(S.COLUMN_WEAPONS_RELOAD_SPEED, w + "." + S.COLUMN_WEAPONS_RELOAD_SPEED);
        projectionMap.put(S.COLUMN_WEAPONS_RAPID_FIRE, w + "." + S.COLUMN_WEAPONS_RAPID_FIRE);
        projectionMap.put(S.COLUMN_WEAPONS_DEVIATION, w + "." + S.COLUMN_WEAPONS_DEVIATION);
        projectionMap.put(S.COLUMN_WEAPONS_AMMO, w + "." + S.COLUMN_WEAPONS_AMMO);
        projectionMap.put(S.COLUMN_WEAPONS_NUM_SLOTS, w + "." + S.COLUMN_WEAPONS_NUM_SLOTS);
        projectionMap.put(S.COLUMN_WEAPONS_SHARPNESS_FILE, w + "." + S.COLUMN_WEAPONS_SHARPNESS_FILE);
        projectionMap.put(S.COLUMN_WEAPONS_FINAL, w + "." + S.COLUMN_WEAPONS_FINAL);
        projectionMap.put(S.COLUMN_WEAPONS_TREE_DEPTH, w + "." + S.COLUMN_WEAPONS_TREE_DEPTH);
        projectionMap.put(S.COLUMN_WEAPONS_PARENT_ID, w + "." + S.COLUMN_WEAPONS_PARENT_ID);
        projectionMap.put(S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME);
        projectionMap.put(S.COLUMN_ITEMS_JPN_NAME, i + "." + S.COLUMN_ITEMS_JPN_NAME);
        projectionMap.put(S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE);
        projectionMap.put(S.COLUMN_ITEMS_SUB_TYPE, i + "." + S.COLUMN_ITEMS_SUB_TYPE);
        projectionMap.put(S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY);
        projectionMap.put(S.COLUMN_ITEMS_CARRY_CAPACITY, i + "."
+ S.COLUMN_ITEMS_CARRY_CAPACITY);
        projectionMap.put(S.COLUMN_ITEMS_BUY, i + "." + S.COLUMN_ITEMS_BUY);
        projectionMap.put(S.COLUMN_ITEMS_SELL, i + "." + S.COLUMN_ITEMS_SELL);
        projectionMap.put(S.COLUMN_ITEMS_DESCRIPTION, i + "." + S.COLUMN_ITEMS_DESCRIPTION);
        projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME);
        projectionMap.put(S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, i + "." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_WEAPONS + " AS w" + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" +
                " ON " + "w." + S.COLUMN_WEAPONS_ID + " = " + "i." + S.COLUMN_ITEMS_ID);

        QB.setProjectionMap(projectionMap);

        return QB;
    }

    /*
     * Get the parent weapon (the weapon this one was upgraded from)
     */
    public WeaponTreeCursor queryWeaponTreeParent(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Selection = "i1." + S.COLUMN_ITEMS_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WeaponTreeCursor(wrapJoinHelper(builderWeaponTreeParent(), qh));
    }

    /*
     * Get the child weapon (weapons this one upgrades into)
     */
    public WeaponTreeCursor queryWeaponTreeChild(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Columns = null;
        qh.Selection = "i1." + S.COLUMN_ITEMS_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WeaponTreeCursor(wrapJoinHelper(builderWeaponTreeChild(), qh));
    }

    /*
     * Helper method to query for weapon tree parent.
     * Walks the components table backwards: the queried item (i1) is the
     * created item, and the component weapon (w2/i2) is its parent.
     */
    private SQLiteQueryBuilder builderWeaponTreeParent() {
        // SELECT i2._id, i2.name
        // FROM items AS i1
        // LEFT OUTER JOIN components AS c ON i1._id = c.created_item_id
        // JOIN weapons AS w2 ON w2._id = c.component_item_id
        // LEFT OUTER JOIN items AS i2 ON i2._id = w2._id
        // WHERE i1._id = 'id';

        String i1 = "i1";
        String i2 = "i2";
        String w2 = "w2";
        String c = "c";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", i2 + "." + S.COLUMN_ITEMS_ID + " AS " + "_id");
        projectionMap.put(S.COLUMN_ITEMS_NAME, i2 + "." + S.COLUMN_ITEMS_NAME);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_ITEMS + " AS i1" + " LEFT OUTER JOIN " + S.TABLE_COMPONENTS + " AS c" +
                " ON " + "i1." + S.COLUMN_ITEMS_ID + " = " + "c." + S.COLUMN_COMPONENTS_CREATED_ITEM_ID +
                " JOIN " + S.TABLE_WEAPONS + " AS w2" + " ON " + "w2." + S.COLUMN_WEAPONS_ID + " = " +
                "c." + S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS +
                " AS i2" + " ON " + "i2." + S.COLUMN_ITEMS_ID + " = " + "w2." + S.COLUMN_WEAPONS_ID );

        QB.setProjectionMap(projectionMap);

        return QB;
    }

    /*
     * Helper method to query for weapon tree child.
     * Mirror of builderWeaponTreeParent: the queried item (i1) is the
     * component, and the created weapon (w2/i2) is its child.
     */
    private SQLiteQueryBuilder builderWeaponTreeChild() {
        // SELECT i2._id, i2.name
        // FROM items AS i1
        // LEFT OUTER JOIN components AS c ON i1._id = c.component_item_id
        // JOIN weapons AS w2 ON w2._id = c.created_item_id
        // LEFT OUTER JOIN items AS i2 ON i2._id = w2._id
        // WHERE i1._id = '_id';

        String i1 = "i1";
        String i2 = "i2";
        String w2 = "w2";
        String c = "c";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", i2 + "." + S.COLUMN_ITEMS_ID + " AS " + "_id");
        projectionMap.put(S.COLUMN_ITEMS_NAME, i2 + "." + S.COLUMN_ITEMS_NAME);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_ITEMS + " AS i1" + " LEFT OUTER JOIN " + S.TABLE_COMPONENTS + " AS c" +
                " ON " + "i1." + S.COLUMN_ITEMS_ID + " = " + "c." + S.COLUMN_COMPONENTS_COMPONENT_ITEM_ID +
                " JOIN " + S.TABLE_WEAPONS + " AS w2" + " ON " + "w2." + S.COLUMN_WEAPONS_ID + " = " +
                "c." + S.COLUMN_COMPONENTS_CREATED_ITEM_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS +
                " AS i2" + " ON " + "i2." + S.COLUMN_ITEMS_ID + " = " + "w2." + S.COLUMN_WEAPONS_ID );

        QB.setProjectionMap(projectionMap);

        return QB;
    }

    /*
     * Get all wishlist
     */
    public WishlistCursor queryWishlists() {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WishlistCursor(wrapHelper(qh));
    }

    /*
     * Get all wishlist using a specific db instance
     */
    public WishlistCursor queryWishlists(SQLiteDatabase db) {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;
        return new WishlistCursor(wrapHelper(db, qh));
    }

    /*
     * Get a specific wishlist
     */
    public WishlistCursor queryWishlist(long id) {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST;
        qh.Columns = null;
        qh.Selection = S.COLUMN_WISHLIST_ID + " = ?";
        qh.SelectionArgs = new String[]{ String.valueOf(id) };
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = "1";
        return new WishlistCursor(wrapHelper(qh));
    }

    /*
     * Add a wishlist; returns the new row id.
     */
    public long queryAddWishlist(String name) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_NAME, name);
        return insertRecord(S.TABLE_WISHLIST, values);
}

    /*
     * Add a wishlist with all info (explicit id; used when copying a wishlist)
     */
    public long queryAddWishlistAll(SQLiteDatabase db, long id, String name) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_ID, id);
        values.put(S.COLUMN_WISHLIST_NAME, name);
        return insertRecord(db, S.TABLE_WISHLIST, values);
    }

    // Rename a wishlist; returns the number of rows updated.
    public int queryUpdateWishlist(long id, String name) {
        String strFilter = S.COLUMN_WISHLIST_ID + " = " + id;
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_NAME, name);
        return updateRecord(S.TABLE_WISHLIST, strFilter, values);
    }

    // Delete a wishlist and its wishlist_data rows; true only if both deletes succeed.
    public boolean queryDeleteWishlist(long id) {
        String where = S.COLUMN_WISHLIST_ID + " = ?";
        String[] args = new String[]{"" + id};
        boolean w1 = deleteRecord(S.TABLE_WISHLIST, where, args);

        where = S.COLUMN_WISHLIST_DATA_WISHLIST_ID + " = ?";
        boolean w2 = deleteRecord(S.TABLE_WISHLIST_DATA, where, args);

        return (w1 && w2);
    }

    /*
     * Get all wishlist data
     */
    public WishlistDataCursor queryWishlistsData() {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST_DATA;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistData();
        Cursor cursor = qb.query(
                getReadableDatabase(), qh.Columns, qh.Selection, qh.SelectionArgs,
                qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);

        return new WishlistDataCursor(cursor);
    }

    /*
     * Get all wishlist data using specific db instance
     */
    public WishlistDataCursor queryWishlistsData(SQLiteDatabase db) {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST_DATA;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistData();
        Cursor cursor = qb.query(
                db, qh.Columns, qh.Selection, qh.SelectionArgs,
                qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);

        return new WishlistDataCursor(cursor);
    }

    /*
     * Get all wishlist data for a specific wishlist, ordered by item id.
     */
    public WishlistDataCursor queryWishlistData(long id) {
        String[] wdColumns = null;
        String wdSelection = "wd." + S.COLUMN_WISHLIST_DATA_WISHLIST_ID + " = ?";
        String[] wdSelectionArgs = new String[]{ String.valueOf(id) };
        String wdGroupBy = null;
        String wdHaving = null;
        String wdOrderBy = "wd." + S.COLUMN_WISHLIST_DATA_ITEM_ID + " ASC";
        String wdLimit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistData();
        Cursor cursor = qb.query(
                getReadableDatabase(), wdColumns, wdSelection, wdSelectionArgs,
                wdGroupBy, wdHaving, wdOrderBy, wdLimit);

        return new WishlistDataCursor(cursor);
    }

    /*
     * Get all wishlist data for a specific wishlist data id
     */
    public WishlistDataCursor queryWishlistDataId(long id) {
        String[] wdColumns = null;
        String wdSelection = "wd." + S.COLUMN_WISHLIST_DATA_ID + " = ?";
        String[] wdSelectionArgs = new String[]{ String.valueOf(id) };
        String wdGroupBy = null;
        String wdHaving = null;
        String wdOrderBy = null;
        String wdLimit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistData();
        Cursor cursor = qb.query(
                getReadableDatabase(), wdColumns, wdSelection, wdSelectionArgs,
                wdGroupBy, wdHaving, wdOrderBy, wdLimit);

        return new WishlistDataCursor(cursor);
    }

    /*
     * Get all data for a specific wishlist and item (path disambiguates
     * entries for the same item added via different crafting paths).
     */
    public WishlistDataCursor queryWishlistData(long wd_id, long item_id, String path) {
        String[] wdColumns = null;
        String wdSelection = "wd." + S.COLUMN_WISHLIST_DATA_WISHLIST_ID + " = ?" + " AND " +
                "wd." + S.COLUMN_WISHLIST_DATA_ITEM_ID + " = ?" + " AND " +
                "wd." + S.COLUMN_WISHLIST_DATA_PATH + " = ?";
        String[] wdSelectionArgs = new String[]{ String.valueOf(wd_id), String.valueOf(item_id), path };
        String wdGroupBy = null;
        String wdHaving = null;
        String wdOrderBy = null;
        String wdLimit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistData();
        Cursor cursor = qb.query(
                getReadableDatabase(), wdColumns, wdSelection, wdSelectionArgs,
                wdGroupBy, wdHaving, wdOrderBy, wdLimit);

        return new WishlistDataCursor(cursor);
    }

    /*
     * Add a wishlist data to a specific wishlist
     */
    public long queryAddWishlistData(long wishlist_id, long item_id, int quantity, String path) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_DATA_WISHLIST_ID, wishlist_id);
        values.put(S.COLUMN_WISHLIST_DATA_ITEM_ID, item_id);
        values.put(S.COLUMN_WISHLIST_DATA_QUANTITY, quantity);
        values.put(S.COLUMN_WISHLIST_DATA_PATH, path);
        return insertRecord(S.TABLE_WISHLIST_DATA, values);
    }

    /*
     * Add a wishlist data to a specific wishlist for copying
     */
    public long queryAddWishlistDataAll(long wishlist_id, long item_id, int quantity, int satisfied, String path) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_DATA_WISHLIST_ID, wishlist_id);
        values.put(S.COLUMN_WISHLIST_DATA_ITEM_ID, item_id);
        values.put(S.COLUMN_WISHLIST_DATA_QUANTITY, quantity);
        values.put(S.COLUMN_WISHLIST_DATA_SATISFIED, satisfied);
        values.put(S.COLUMN_WISHLIST_DATA_PATH, path);
        return insertRecord(S.TABLE_WISHLIST_DATA, values);
    }

    /*
     * Add a wishlist data to a specific wishlist for copying
     */
    public long queryAddWishlistDataAll(SQLiteDatabase db, long wishlist_id, long item_id, int quantity, int satisfied, String path) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_DATA_WISHLIST_ID, wishlist_id);
        values.put(S.COLUMN_WISHLIST_DATA_ITEM_ID, item_id);
        values.put(S.COLUMN_WISHLIST_DATA_QUANTITY, quantity);
        values.put(S.COLUMN_WISHLIST_DATA_SATISFIED, satisfied);
        values.put(S.COLUMN_WISHLIST_DATA_PATH,
path);
        return insertRecord(db, S.TABLE_WISHLIST_DATA, values);
    }

    /*
     * Update a wishlist data to a specific wishlist
     */
    public int queryUpdateWishlistDataQuantity(long id, int quantity) {
        String strFilter = S.COLUMN_WISHLIST_DATA_ID + " = " + id;
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_DATA_QUANTITY, quantity);
        return updateRecord(S.TABLE_WISHLIST_DATA, strFilter, values);
    }

    /*
     * Update a wishlist data to a specific wishlist
     */
    public int queryUpdateWishlistDataSatisfied(long id, int satisfied) {
        String strFilter = S.COLUMN_WISHLIST_DATA_ID + " = " + id;
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_DATA_SATISFIED, satisfied);
        return updateRecord(S.TABLE_WISHLIST_DATA, strFilter, values);
    }

    // Delete a single wishlist_data row by its _id.
    public boolean queryDeleteWishlistData(long id) {
        String where = S.COLUMN_WISHLIST_DATA_ID + " = ?";
        String[] args = new String[]{ "" + id };
        return deleteRecord(S.TABLE_WISHLIST_DATA, where, args);
    }

    /*
     * Helper method to query for wishlistData
     */
    private SQLiteQueryBuilder builderWishlistData() {
        // SELECT wd._id AS _id, wd.wishlist_id, wd.item_id, wd.quantity, wd.satisfied, wd.path
        // i.name, i.jpn_name, i.type, i.rarity, i.carry_capacity, i.buy, i.sell, i.description,
        // i.icon_name, i.armor_dupe_name_fix
        // FROM wishlist_data AS wd
        // LEFT OUTER JOIN wishlist AS w ON wd.wishlist_id = w._id
        // LEFT OUTER JOIN items AS i ON wd.item_id = i._id;

        String wd = "wd";
        String w = "w";
        String i = "i";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", wd + "." + S.COLUMN_WISHLIST_DATA_ID + " AS " + "_id");
        projectionMap.put(S.COLUMN_WISHLIST_DATA_WISHLIST_ID, wd + "." + S.COLUMN_WISHLIST_DATA_WISHLIST_ID);
        projectionMap.put(S.COLUMN_WISHLIST_DATA_ITEM_ID, wd + "." + S.COLUMN_WISHLIST_DATA_ITEM_ID);
        projectionMap.put(S.COLUMN_WISHLIST_DATA_QUANTITY, wd + "." + S.COLUMN_WISHLIST_DATA_QUANTITY);
        projectionMap.put(S.COLUMN_WISHLIST_DATA_SATISFIED, wd + "." + S.COLUMN_WISHLIST_DATA_SATISFIED);
        projectionMap.put(S.COLUMN_WISHLIST_DATA_PATH, wd + "." + S.COLUMN_WISHLIST_DATA_PATH);
        projectionMap.put(S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME);
        projectionMap.put(S.COLUMN_ITEMS_JPN_NAME, i + "." + S.COLUMN_ITEMS_JPN_NAME);
        projectionMap.put(S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE);
        projectionMap.put(S.COLUMN_ITEMS_SUB_TYPE, i + "." + S.COLUMN_ITEMS_SUB_TYPE);
        projectionMap.put(S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY);
        projectionMap.put(S.COLUMN_ITEMS_CARRY_CAPACITY, i + "." + S.COLUMN_ITEMS_CARRY_CAPACITY);
        projectionMap.put(S.COLUMN_ITEMS_BUY, i + "." + S.COLUMN_ITEMS_BUY);
        projectionMap.put(S.COLUMN_ITEMS_SELL, i + "." + S.COLUMN_ITEMS_SELL);
        projectionMap.put(S.COLUMN_ITEMS_DESCRIPTION, i + "." + S.COLUMN_ITEMS_DESCRIPTION);
        projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME);
        projectionMap.put(S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, i + "." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX);

        //Create new querybuilder
        SQLiteQueryBuilder QB = new SQLiteQueryBuilder();

        QB.setTables(S.TABLE_WISHLIST_DATA + " AS wd" + " LEFT OUTER JOIN " + S.TABLE_WISHLIST +
                " AS w" + " ON " + "wd." + S.COLUMN_WISHLIST_DATA_WISHLIST_ID + " = " + "w." +
                S.COLUMN_WISHLIST_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " +
                "wd." + S.COLUMN_WISHLIST_DATA_ITEM_ID + " = " + "i." + S.COLUMN_ITEMS_ID);

        QB.setProjectionMap(projectionMap);

        return QB;
    }

    /*
     * Get all wishlist components
     */
    public WishlistComponentCursor queryWishlistsComponent() {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST_COMPONENT;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistComponent();
        Cursor cursor = qb.query(
                getReadableDatabase(), qh.Columns, qh.Selection, qh.SelectionArgs,
                qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);

        return new WishlistComponentCursor(cursor);
    }

    /**
     * Get all wishlist components using a specific db instance
     * @param db open database to query against (instead of getReadableDatabase())
     * @return cursor over all wishlist_component rows joined with item data
     */
    public WishlistComponentCursor queryWishlistsComponent(SQLiteDatabase db) {
        QueryHelper qh = new QueryHelper();
        qh.Distinct = false;
        qh.Table = S.TABLE_WISHLIST_COMPONENT;
        qh.Columns = null;
        qh.Selection = null;
        qh.SelectionArgs = null;
        qh.GroupBy = null;
        qh.Having = null;
        qh.OrderBy = null;
        qh.Limit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistComponent();
        Cursor cursor = qb.query(
                db, qh.Columns, qh.Selection, qh.SelectionArgs,
                qh.GroupBy, qh.Having, qh.OrderBy, qh.Limit);

        return new WishlistComponentCursor(cursor);
    }

    /*
     * Get all wishlist components for a specific wishlist, ordered by component id.
     */
    public WishlistComponentCursor queryWishlistComponents(long id) {
        String[] wcColumns = null;
        String wcSelection = "wc." + S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID + " = ?";
        String[] wcSelectionArgs = new String[]{ String.valueOf(id) };
        String wcGroupBy = null;
        String wcHaving = null;
        String wcOrderBy = "wc."
+ S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID + " ASC";
        String wcLimit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistComponent();
        Cursor cursor = qb.query(
                getReadableDatabase(), wcColumns, wcSelection, wcSelectionArgs,
                wcGroupBy, wcHaving, wcOrderBy, wcLimit);

        return new WishlistComponentCursor(cursor);
    }

    /*
     * Get all data for a specific wishlist and item
     */
    public WishlistComponentCursor queryWishlistComponent(long wc_id, long item_id) {
        String[] wcColumns = null;
        String wcSelection = "wc." + S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID + " = ?" + " AND " +
                "wc." + S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID + " = ?";
        String[] wcSelectionArgs = new String[]{ String.valueOf(wc_id), String.valueOf(item_id) };
        String wcGroupBy = null;
        String wcHaving = null;
        String wcOrderBy = null;
        String wcLimit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistComponent();
        Cursor cursor = qb.query(
                getReadableDatabase(), wcColumns, wcSelection, wcSelectionArgs,
                wcGroupBy, wcHaving, wcOrderBy, wcLimit);

        return new WishlistComponentCursor(cursor);
    }

    /*
     * Get all wishlist components for a specific id
     */
    public WishlistComponentCursor queryWishlistComponentId(long id) {
        String[] wcColumns = null;
        String wcSelection = "wc." + S.COLUMN_WISHLIST_COMPONENT_ID + " = ?";
        String[] wcSelectionArgs = new String[]{ String.valueOf(id) };
        String wcGroupBy = null;
        String wcHaving = null;
        String wcOrderBy = null;
        String wcLimit = null;

        // Multithread issues workaround
        SQLiteQueryBuilder qb = builderWishlistComponent();
        Cursor cursor = qb.query(
                getReadableDatabase(), wcColumns, wcSelection, wcSelectionArgs,
                wcGroupBy, wcHaving, wcOrderBy, wcLimit);

        return new WishlistComponentCursor(cursor);
    }

    /*
     * Add a wishlist component to a specific wishlist
     */
    public long queryAddWishlistComponent(long wishlist_id, long component_id, int quantity) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID, wishlist_id);
        values.put(S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID, component_id);
        values.put(S.COLUMN_WISHLIST_COMPONENT_QUANTITY, quantity);
        return insertRecord(S.TABLE_WISHLIST_COMPONENT, values);
    }

    /*
     * Add a wishlist component to a specific wishlist (includes notes; used when copying)
     */
    public long queryAddWishlistComponentAll(long wishlist_id, long component_id, int quantity, int notes) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID, wishlist_id);
        values.put(S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID, component_id);
        values.put(S.COLUMN_WISHLIST_COMPONENT_QUANTITY, quantity);
        values.put(S.COLUMN_WISHLIST_COMPONENT_NOTES, notes);
        return insertRecord(S.TABLE_WISHLIST_COMPONENT, values);
    }

    /*
     * Add a wishlist component to a specific wishlist
     */
    public long queryAddWishlistComponentAll(SQLiteDatabase db, long wishlist_id, long component_id, int quantity, int notes) {
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID, wishlist_id);
        values.put(S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID, component_id);
        values.put(S.COLUMN_WISHLIST_COMPONENT_QUANTITY, quantity);
        values.put(S.COLUMN_WISHLIST_COMPONENT_NOTES, notes);
        return insertRecord(db, S.TABLE_WISHLIST_COMPONENT, values);
    }

    /*
     * Update a wishlist component to a specific wishlist
     */
    public int queryUpdateWishlistComponentQuantity(long id, int quantity) {
        String strFilter = S.COLUMN_WISHLIST_COMPONENT_ID + " = " + id;
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_COMPONENT_QUANTITY, quantity);
        return updateRecord(S.TABLE_WISHLIST_COMPONENT, strFilter, values);
    }

    // Delete a single wishlist_component row by its _id.
    public boolean queryDeleteWishlistComponent(long id) {
        String where = S.COLUMN_WISHLIST_COMPONENT_ID + " = ?";
        String[] args = new String[]{ "" + id };
        return deleteRecord(S.TABLE_WISHLIST_COMPONENT, where, args);
    }

    /*
     * Update a wishlist component to a specific wishlist
     */
    public int queryUpdateWishlistComponentNotes(long id, int notes) {
        String strFilter = S.COLUMN_WISHLIST_COMPONENT_ID + " = " + id;
        ContentValues values = new ContentValues();
        values.put(S.COLUMN_WISHLIST_COMPONENT_NOTES, notes);
        return updateRecord(S.TABLE_WISHLIST_COMPONENT, strFilter, values);
    }

    /*
     * Helper method to query components for wishlistData
     */
    private SQLiteQueryBuilder builderWishlistComponent() {
        // SELECT wc._id AS _id, wc.wishlist_id, wc.component_id, wc.quantity, wc.notes
        // i.name, i.jpn_name, i.type, i.sub_type, i.rarity, i.carry_capacity, i.buy, i.sell, i.description,
        // i.icon_name, i.armor_dupe_name_fix
        // FROM wishlist_component AS wc
        // LEFT OUTER JOIN wishlist AS w ON wc.wishlist_id = w._id
        // LEFT OUTER JOIN items AS i ON wc.component_id = i._id;

        String wc = "wc";
        String w = "w";
        String i = "i";

        HashMap<String, String> projectionMap = new HashMap<String, String>();

        projectionMap.put("_id", wc + "." + S.COLUMN_WISHLIST_COMPONENT_ID + " AS " + "_id");
        projectionMap.put(S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID, wc + "." + S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID);
        projectionMap.put(S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID, wc + "." + S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID);
        projectionMap.put(S.COLUMN_WISHLIST_COMPONENT_QUANTITY, wc + "." + S.COLUMN_WISHLIST_COMPONENT_QUANTITY);
        projectionMap.put(S.COLUMN_WISHLIST_COMPONENT_NOTES, wc + "."
+ S.COLUMN_WISHLIST_COMPONENT_NOTES); projectionMap.put(S.COLUMN_ITEMS_NAME, i + "." + S.COLUMN_ITEMS_NAME); projectionMap.put(S.COLUMN_ITEMS_JPN_NAME, i + "." + S.COLUMN_ITEMS_JPN_NAME); projectionMap.put(S.COLUMN_ITEMS_TYPE, i + "." + S.COLUMN_ITEMS_TYPE); projectionMap.put(S.COLUMN_ITEMS_SUB_TYPE, i + "." + S.COLUMN_ITEMS_SUB_TYPE); projectionMap.put(S.COLUMN_ITEMS_RARITY, i + "." + S.COLUMN_ITEMS_RARITY); projectionMap.put(S.COLUMN_ITEMS_CARRY_CAPACITY, i + "." + S.COLUMN_ITEMS_CARRY_CAPACITY); projectionMap.put(S.COLUMN_ITEMS_BUY, i + "." + S.COLUMN_ITEMS_BUY); projectionMap.put(S.COLUMN_ITEMS_SELL, i + "." + S.COLUMN_ITEMS_SELL); projectionMap.put(S.COLUMN_ITEMS_DESCRIPTION, i + "." + S.COLUMN_ITEMS_DESCRIPTION); projectionMap.put(S.COLUMN_ITEMS_ICON_NAME, i + "." + S.COLUMN_ITEMS_ICON_NAME); projectionMap.put(S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX, i + "." + S.COLUMN_ITEMS_ARMOR_DUPE_NAME_FIX); //Create new querybuilder SQLiteQueryBuilder QB = new SQLiteQueryBuilder(); QB.setTables(S.TABLE_WISHLIST_COMPONENT + " AS wc" + " LEFT OUTER JOIN " + S.TABLE_WISHLIST + " AS w" + " ON " + "wc." + S.COLUMN_WISHLIST_COMPONENT_WISHLIST_ID + " = " + "w." + S.COLUMN_WISHLIST_ID + " LEFT OUTER JOIN " + S.TABLE_ITEMS + " AS i" + " ON " + "wc." + S.COLUMN_WISHLIST_COMPONENT_COMPONENT_ID + " = " + "i." + S.COLUMN_ITEMS_ID); QB.setProjectionMap(projectionMap); return QB; } }
package com.psychic_engine.cmput301w17t10.feelsappman;

import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.location.Location;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Spinner;
import android.widget.Toast;

import java.util.ArrayList;
import java.util.List;

import static com.psychic_engine.cmput301w17t10.feelsappman.R.id.imageView;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;

/**
 * CreateMoodActivity will be similar to the EditMoodActivity in such a way that the format will be
 * the same. However, one will be able to edit previously created events and one will only be able
 * to create new ones. The participant will be able to enter a variety of options where the mood
 * state is mandatory for entry, while others are optional.
 * @see EditMoodActivity
 */
public class CreateMoodActivity extends AppCompatActivity {

    private static final String defaultTriggerMsg = "20 chars or 3 words.";
    private static int RESULT_LOAD_IMAGE = 1;

    private Spinner moodSpinner;
    private Spinner socialSettingSpinner;
    private EditText triggerEditText;
    private EditText locationEditText; // TODO: change type
    private Button browseButton;
    private ImageView photoImageView;
    private Button createButton;
    private Button cancelButton;

    /**
     * Calls upon the methods to initialize the UI needed.
     * @param savedInstanceState
     */
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_create_mood);

        // BUG FIX: findViewById() only works after setContentView() has
        // inflated the layout; looking the view up before that always
        // returned null, so the selected photo was never saved.
        photoImageView = (ImageView) findViewById(R.id.imageView);

        isStoragePermissionGranted();

        // set up mood and social setting spinners (drop downs)
        setUpSpinners();

        // set up events that happen when user clicks in trigger and outside trigger
        setUpTrigger();

        // set up events that happen when user clicks browse button
        setUpBrowse();

        // set up events that happen when user clicks create button
        setUpCreate();

        // set up events that happen when user clicks cancel button
        setUpCancel();
    }

    //Taken from http://stackoverflow.com/questions/33162152/storage-permission-error-in-marshmallow/41221852#41221852
    //March 10, 2017
    /**
     * Checks (and, if missing, requests) the READ_EXTERNAL_STORAGE runtime
     * permission required on API 23+.
     *
     * @return true when the permission is already granted (or not needed
     *         pre-Marshmallow); false when a request was just issued
     */
    public boolean isStoragePermissionGranted() {
        if (Build.VERSION.SDK_INT >= 23) {
            if (checkSelfPermission(android.Manifest.permission.READ_EXTERNAL_STORAGE)
                    == PackageManager.PERMISSION_GRANTED) {
                return true;
            } else {
                ActivityCompat.requestPermissions(this,
                        new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 1);
                return false;
            }
        } else {
            // Permissions are granted at install time before API 23.
            return true;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions,
                                           int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            // Nothing to do immediately; the permission is consulted again
            // the next time the participant browses for a photo.
        }
    }

    /**
     * Main method to call whenever the participant was to create their mood event after setting
     * their options. The system will obtain all of the information (null or not), and then save
     * the mood event into the participants own arrays of mood events. Depending on the options,
     * the participant will be able to save it as a picture, which will have required a prompt to
     * access the external storage.
     */
    void createMoodEvent() {
        // get the mood from the mood spinner
        String moodString = moodSpinner.getSelectedItem().toString();

        // BUG FIX: the social setting must come from its own spinner; this
        // previously read moodSpinner a second time (copy-paste error), so
        // the chosen social setting was silently discarded.
        String socialSettingString = socialSettingSpinner.getSelectedItem().toString();

        // get the trigger from the trigger edit text
        String trigger = triggerEditText.getText().toString();

        //initially sets photo to null
        Photograph photo = null;
        boolean photoSizeUnder = true;
        if (photoImageView != null) {
            //March 10, 2017
            //gets drawable from imageview and converts drawable to bitmap
            BitmapDrawable drawable = (BitmapDrawable) photoImageView.getDrawable();
            // Guard: the ImageView has no drawable until the participant
            // browses for a photo; dereferencing it blindly NPE'd.
            if (drawable != null) {
                Bitmap bitmap = drawable.getBitmap();
                photo = new Photograph(bitmap);
                photoSizeUnder = photo.getLimitSize();
            }
        }

        Location location = null;
        // TODO get location from location box - need to know how to use GOOGLE MAPS first

        //TODO call this explicitly like this or through notifyObservers()
        if (photoSizeUnder) {
            boolean success = CreateMoodController.updateMoodEventList(moodString,
                    socialSettingString, trigger, photo, location);
            if (!success) {
                Toast.makeText(CreateMoodActivity.this,
                        "Please specify a mood.", Toast.LENGTH_LONG).show();
            } else {
                Intent intent = new Intent(CreateMoodActivity.this, SelfNewsFeedActvity.class);
                startActivity(intent);
            }
        } else {
            Toast.makeText(CreateMoodActivity.this,
                    "Photo size is too large! (Max 65536 bytes)", Toast.LENGTH_LONG).show();
        }

        //TODO: MoodEvent list for selfParticipant needs to save
        //TODO: MoodEvent list resets on app termination and reopen
        //TODO: Maybe try to get the bring in saveInFile on a superclass and keep on all activities
        for (MoodEvent mood : ParticipantSingleton.getInstance()
                .getSelfParticipant().getMoodList()) {
            Log.i("MoodEvent Added", "This mood event is of: " + mood.getMood().getMood());
        }
    }

    /**
     * Setup method to create the spinners in the UI
     */
    void setUpSpinners() {
        // Spinner elements
        moodSpinner = (Spinner) findViewById(R.id.moodDropDown);
        socialSettingSpinner = (Spinner) findViewById(R.id.socialSettingDropDown);

        // Spinner drop down elements
        List<String> moodCategories = new ArrayList<String>();
        moodCategories.add(""); // default option
        MoodState[] moodStates = MoodState.values();
        for (MoodState moodState : moodStates) {
            moodCategories.add(moodState.toString());
        }

        List<String> socialSettingCategories = new ArrayList<String>();
        socialSettingCategories.add(""); // default option
        SocialSetting[] socialSettings = SocialSetting.values();
        for (SocialSetting socialSetting : socialSettings) {
            socialSettingCategories.add(socialSetting.toString());
        }

        // Creating adapter for spinners
        ArrayAdapter<String> moodSpinnerAdapter = new ArrayAdapter<String>(
                this, android.R.layout.simple_spinner_item, moodCategories);
        ArrayAdapter<String> socialSettingSpinnerAdapter = new ArrayAdapter<String>(
                this, android.R.layout.simple_spinner_item, socialSettingCategories);

        // Drop down layout style - list view with radio button
        moodSpinnerAdapter.setDropDownViewResource(
                android.R.layout.simple_spinner_dropdown_item);
        socialSettingSpinnerAdapter.setDropDownViewResource(
                android.R.layout.simple_spinner_dropdown_item);

        // Attaching adapter to spinner
        moodSpinner.setAdapter(moodSpinnerAdapter);
        socialSettingSpinner.setAdapter(socialSettingSpinnerAdapter);
    }

    /**
     * Setup method for the trigger EditText category
     */
    void setUpTrigger() {
        triggerEditText = (EditText) findViewById(R.id.trigger);
        triggerEditText.setText("");

        // TODO not working perfectly - requires 2 clicks after initial click
        // TODO giving me errors in test - leaving it blank for now
        // clear trigger edit text when user clicks in it if default msg is displayed
        /*
        triggerEditText = (EditText) findViewById(R.id.trigger);
        triggerEditText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (triggerEditText.getText().toString().equals(defaultTriggerMsg))
                    triggerEditText.setText("");
            }
        });

        // reset trigger edit text message if the user clicks away from it and it is blank
        triggerEditText.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                if (!hasFocus) {
                    // user has clicked out of triggerEditText
                    if (triggerEditText.getText().toString().equals(""))
                        triggerEditText.setText(defaultTriggerMsg);
                }
            }
        });
        */
    }

    /**
     * Setup method the browse button, being able to select pictures from the phone storage.
     */
    void setUpBrowse() {
        // on 03-06-17
        browseButton = (Button) findViewById(R.id.browse);
        browseButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent i = new Intent(
                        Intent.ACTION_PICK,
                        android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
                startActivityForResult(i, RESULT_LOAD_IMAGE);
            }
        });
    }

    /**
     * Upon execution, the activity will be able to display the photo that the participant selected
     * so long the size is within limit.
     * @param requestCode
     * @param resultCode
     * @param data
     */
    // displayed the browsed image
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);

        if (requestCode == RESULT_LOAD_IMAGE && resultCode == RESULT_OK && null != data) {
            Uri selectedImage = data.getData();
            String[] filePathColumn = {MediaStore.Images.Media.DATA};

            // Resolve the content Uri to an on-disk file path.
            Cursor cursor = getContentResolver().query(selectedImage,
                    filePathColumn, null, null, null);
            cursor.moveToFirst();

            int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
            String picturePath = cursor.getString(columnIndex);
            cursor.close();

            photoImageView = (ImageView) findViewById(imageView);
            photoImageView.setImageBitmap(BitmapFactory.decodeFile(picturePath));
        }
    }

    /**
     * Setup method for the create button, which will issue a command to create the mood event on
     * click.
     */
    void setUpCreate() {
        createButton = (Button) findViewById(R.id.create);
        createButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                createMoodEvent();
            }
        });
    }

    /**
     * Setup method for the cancel button, which will issue a command to close the addition of a
     * mood event if the paticipant ever changes their mind.
     */
    void setUpCancel() {
        cancelButton = (Button) findViewById(R.id.cancel);
        cancelButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
            }
        });
    }
}
package de.fau.cs.mad.fablab.android.productsearch;

import android.app.Dialog;
import android.app.SearchManager;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.RecyclerView;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.view.inputmethod.EditorInfo;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import de.fau.cs.mad.fablab.android.BaseActivity;
import de.fau.cs.mad.fablab.android.R;
import de.fau.cs.mad.fablab.android.productMap.LocationParser;
import de.fau.cs.mad.fablab.android.productMap.ProductMapActivity;
import de.fau.cs.mad.fablab.android.ui.UiUtils;
import de.fau.cs.mad.fablab.rest.ProductApiClient;
import de.fau.cs.mad.fablab.rest.core.Product;
import de.fau.cs.mad.fablab.rest.myapi.ProductApi;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;

/**
 * Activity that lets the user search for products by name, shows the results in an
 * indexable list and opens a detail dialog for a selected product.
 */
public class ProductSearchActivity extends BaseActivity implements
        ProductDialog.ProductDialogListener, AdapterView.OnItemClickListener,
        AdapterView.OnItemSelectedListener {

    public static final String KEY_LOCATION = "location";

    private final String KEY_SEARCHED_PRODUCTS = "searched_products";
    private final String KEY_SELECTED_PRODUCT = "selected_product";
    private final String KEY_PRODUCT_DIALOG = "product_dialog";

    private RecyclerView.LayoutManager layoutManager;
    private ProductAdapter productAdapter;

    //our rest-callback interface
    private ProductApi mProductApi;

    private ProductDialog productDialog;
    private Product selectedProduct;
    private View spinnerContainerView;
    private ImageView spinnerImageView;

    // Accumulates the products of the current search; cleared in search().
    ArrayList<Product> results = new ArrayList<Product>();

    //This callback is used for product Search.
    private Callback<List<Product>> mSearchCallback = new Callback<List<Product>>() {
        @Override
        public void success(List<Product> products, Response response) {
            if (products.isEmpty()) {
                Toast.makeText(getBaseContext(), R.string.product_not_found,
                        Toast.LENGTH_LONG).show();
            }
            results.addAll(products);
            Collections.sort(results, new ProductSort());
            productAdapter.addAll(results);
            productAdapter.notifyDataSetChanged();
            // Re-enable input now that results have arrived.
            UiUtils.hideSpinner(spinnerContainerView, spinnerImageView);
            getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE);
        }

        @Override
        public void failure(RetrofitError error) {
            Toast.makeText(getBaseContext(), R.string.retrofit_callback_failure,
                    Toast.LENGTH_LONG).show();
            UiUtils.hideSpinner(spinnerContainerView, spinnerImageView);
            getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE);
        }
    };

    @Override
    protected void baseSetContentView() {
        setContentView(R.layout.activity_product_search);
    }

    @Override
    protected void baseOnCreate(Bundle savedInstanceState) {
        AutoCompleteHelper.getInstance().loadProductNames(this);
        mProductApi = new ProductApiClient(this).get();

        spinnerContainerView = (View) findViewById(R.id.spinner);
        spinnerImageView = (ImageView) findViewById(R.id.spinner_image);

        initCartPanel(true);
        initFabButton();

        //get search view and set searchable configuration
        SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);

        //For Autocomplete
        final AutoCompleteTextView searchView =
                (AutoCompleteTextView) findViewById(R.id.product_search_view);
        searchView.setThreshold(2); //min 2 chars before autocomplete

        //Set adapter to AutoCompleteTextView
        ArrayAdapter<String> adapter = new ArrayAdapter<>(this,
                android.R.layout.simple_dropdown_item_1line,
                AutoCompleteHelper.getInstance().getPossibleAutoCompleteWords());
        searchView.setAdapter(adapter);
        searchView.setOnItemSelectedListener(this);
        searchView.setOnItemClickListener(this);
        searchView.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                boolean handled = false;
                if (actionId == EditorInfo.IME_ACTION_SEND) {
                    searchView.dismissDropDown();
                    search(searchView.getText().toString());
                    handled = true;
                }
                return handled;
            }
        });
        //searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName()));

        //do not iconify search view
        //searchView.setIconified(false);

        //get indexable list view and set adapter
        IndexableListView indexableListView =
                (IndexableListView) findViewById(R.id.product_indexable_list_view);
        productAdapter = new ProductAdapter(getApplicationContext(), R.layout.product_entry);
        indexableListView.setAdapter(productAdapter);
        indexableListView.setFastScrollEnabled(true);

        //add listener to handle click events
        indexableListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                //set selected product
                selectedProduct = productAdapter.getItem(position);
                //show product dialog
                productDialog = ProductDialog.newInstance(selectedProduct);
                productDialog.show(getFragmentManager(), "product_dialog");
            }
        });

        if (savedInstanceState != null) {
            //recreate saved instance state
            productAdapter.addAll((ArrayList<Product>) savedInstanceState
                    .getSerializable(KEY_SEARCHED_PRODUCTS));
            selectedProduct = (Product) savedInstanceState.getSerializable(KEY_SELECTED_PRODUCT);
        }

        //handle intent
        handleIntent(getIntent());
    }

    //For Autocomplete
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        search(String.valueOf(parent.getItemAtPosition(position)));
    }

    //For Autocomplete
    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        search(String.valueOf(parent.getItemAtPosition(position)));
    }

    //For Autocomplete
    @Override
    public void onNothingSelected(AdapterView<?> parent) {
        UiUtils.hideKeyboard(this);
    }

    @Override
    protected void onNewIntent(Intent intent) {
        //handle intent
        setIntent(intent);
        handleIntent(intent);
    }

    private void handleIntent(Intent intent) {
        //verify the action and get the query
        if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
            String query = intent.getStringExtra(SearchManager.QUERY);
            search(query);
        }
    }

    /**
     * Starts an asynchronous product search; results arrive via mSearchCallback.
     *
     * @param query the (partial) product name to search for
     */
    private void search(String query) {
        //show products containing the query
        productAdapter.clear();
        // BUG FIX: the backing result list must be cleared along with the
        // adapter; otherwise each repeat search re-added every previous
        // result, producing growing duplicate entries.
        results.clear();
        //TODO maybe add a limit here?

        //show spinner and disable input
        UiUtils.showSpinner(spinnerContainerView, spinnerImageView);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE,
                WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE);
        UiUtils.hideKeyboard(this);

        mProductApi.findByName(query, 0, 0, mSearchCallback);
    }

    @Override
    public void onShowLocationClick() {
        //show location
        Intent intent = new Intent(this, ProductMapActivity.class);
        intent.putExtra(KEY_LOCATION, selectedProduct.getLocation());
        startActivity(intent);
    }

    @Override
    public void onAddToCartClick() {
        //dismiss product dialog
        productDialog.dismiss();
        //show add to cart dialog
        Intent intent = new Intent(this, AddToCartActivity.class);
        intent.putExtra("product", selectedProduct);
        startActivity(intent);
    }

    @Override
    public void onReportClick() {
        //report missing product
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        //save searched products
        outState.putSerializable(KEY_SEARCHED_PRODUCTS, productAdapter.getAllItems());
        //save selected product
        outState.putSerializable(KEY_SELECTED_PRODUCT, selectedProduct);
    }

    @Override
    protected boolean baseOnCreateOptionsMenu(Menu menu) {
        appbarDrawer.showOrderByIcon();

        MenuItem orderby_name = menu.getItem(1).getSubMenu().getItem(0);
        orderby_name.setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                appbarDrawer.orderByName();
                // TODO
                /* Enter here behaviour to change order, best with:
                   Collection sort on arraylist, then productAdapter.notifyDataSetChanged(); */
                /* Toast is just for test, can be deleted afterwards */
                Toast.makeText(getBaseContext(), "Name", Toast.LENGTH_SHORT).show();
                return true;
            }
        });

        MenuItem orderby_price = menu.getItem(1).getSubMenu().getItem(1);
        orderby_price.setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                appbarDrawer.orderByPrice();
                // TODO
                /* Enter here behaviour to change order, best with:
                   Collection sort on arraylist, then productAdapter.notifyDataSetChanged(); */
                /* Toast is just for test, can be deleted afterwards */
                Toast.makeText(getBaseContext(), "Price", Toast.LENGTH_SHORT).show();
                return true;
            }
        });

        return true;
    }
}
package org.qiaoer.mobilesafer361.safe.moudle.dialog; import android.app.Dialog; import android.content.Context; import android.os.Bundle; import android.text.TextUtils; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import org.qiaoer.mobilesafer361.R; public class EnterPasswordDialog extends Dialog implements View.OnClickListener { private TextView mTitleTextView; private EditText mEnterPasswordEditText; private Button okButton; private Button cancelButton; private MyCallBack myCallBack; public EnterPasswordDialog(Context context) { super(context, R.style.dialog_custom); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.enter_password_dialog); initView(); } private void initView() { mTitleTextView = (TextView) findViewById(R.id.enterPasswordTitleTextView); mEnterPasswordEditText = (EditText) findViewById(R.id.enterPasswordEditText); okButton = (Button) findViewById(R.id.enterPasswordOkButton); cancelButton = (Button) findViewById(R.id.enterPasswordCancelButton); okButton.setOnClickListener(this); cancelButton.setOnClickListener(this); } public void setMyCallBack(MyCallBack myCallBack) { this.myCallBack = myCallBack; } /** * * * @param title */ public void setTitle(String title) { if (!TextUtils.isEmpty(title)) { mTitleTextView.setText(title); } } @Override public void onClick(View view) { switch (view.getId()) { case R.id.enterPasswordOkButton: myCallBack.ok(); break; case R.id.enterPasswordCancelButton: myCallBack.cancel(); break; default: break; } } public interface MyCallBack { void ok(); void cancel(); } }
package pl.com.chodera.myweather.details.fragment;

import android.os.Bundle;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.design.widget.CoordinatorLayout;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import butterknife.Bind;
import butterknife.ButterKnife;
import io.realm.RealmResults;
import pl.com.chodera.myweather.R;
import pl.com.chodera.myweather.common.BaseFragment;
import pl.com.chodera.myweather.common.Commons;
import pl.com.chodera.myweather.common.listeners.WeatherDownloadListener;
import pl.com.chodera.myweather.details.view.WeatherLineChart;
import pl.com.chodera.myweather.model.db.FavoriteLocation;
import pl.com.chodera.myweather.network.DownloadingUtil;
import pl.com.chodera.myweather.network.HandleWeatherResponse;
import pl.com.chodera.myweather.network.response.WeatherForecastResponse;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;

/**
 * Fragment showing the current weather and a forecast chart for one location,
 * with a floating action button that toggles the location's favorite status
 * (persisted via Realm).
 */
public class WeatherDetailsFragment extends BaseFragment implements WeatherDownloadListener {

    // Views injected by ButterKnife in onCreateView().
    @Bind(R.id.id_activity_details_coordinator_layout)
    public CoordinatorLayout coordinatorLayout;

    @Bind(R.id.toolbar_layout)
    public CollapsingToolbarLayout collapsingToolbarLayout;

    @Bind(R.id.item_primary_text)
    public TextView currentWeatherLabel;

    @Bind(R.id.item_current_weather_info)
    public TextView currentWeatherInfo;

    @Bind(R.id.toolbar)
    public Toolbar toolbar;

    @Bind(R.id.fab)
    public FloatingActionButton floatingActionButton;

    @Bind(R.id.chart)
    public WeatherLineChart chart;

    // Whether the displayed location is stored as a favorite in Realm.
    private boolean isLocationFavorite = false;
    // The matching Realm record, if any (set by checkIsLocationSavedAsFavourite()).
    private FavoriteLocation favoriteLocation;
    // Location name taken from the fragment arguments.
    private String locationName;

    public WeatherDetailsFragment() {
        // Required empty public constructor
    }

    /**
     * Factory method building the fragment with its required arguments.
     *
     * @param param1 the location name (Commons.ArgumentParams.LOCATION_NAME)
     * @param param2 pre-fetched weather info text, may be empty
     *               (Commons.ArgumentParams.WEATHER_INFO)
     */
    public static WeatherDetailsFragment newInstance(String param1, String param2) {
        WeatherDetailsFragment fragment = new WeatherDetailsFragment();
        Bundle args = new Bundle();
        args.putString(Commons.ArgumentParams.LOCATION_NAME, param1);
        args.putString(Commons.ArgumentParams.WEATHER_INFO, param2);
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.fragment_weather_details, container, false);
        ButterKnife.bind(this, view);
        setupView();
        getForecastData();
        return view;
    }

    // WeatherDownloadListener: current-weather request failed.
    @Override
    public void downloadingWeatherFailed() {
        currentWeatherInfo.setText(getString(R.string.activity_details_weather_not_found));
    }

    // WeatherDownloadListener: current-weather request succeeded; refresh the
    // favorite state because the resolved name may differ from the query.
    @Override
    public void downloadingWeatherSucceeded(String weatherInfo, String name) {
        currentWeatherInfo.setText(weatherInfo);
        setActivityTitle(name);
        checkIsLocationSavedAsFavourite();
        setupFavButtonAction();
    }

    // One-time view configuration; favorite state must be resolved before the
    // FAB icon is chosen.
    private void setupView() {
        setupToolbar(toolbar);
        changeToBackNavigationMode();
        setupCurrentWeatherInfo();
        checkIsLocationSavedAsFavourite();
        setFavButtonIcon();
    }

    // Shows the weather text passed in the arguments, or triggers a download
    // when none was provided.
    private void setupCurrentWeatherInfo() {
        currentWeatherLabel.setText(R.string.activity_details_current_weather_label);

        String weatherInfo = null;
        if (getArguments() != null) {
            locationName = getArguments().getString(Commons.ArgumentParams.LOCATION_NAME);
            weatherInfo = getArguments().getString(Commons.ArgumentParams.WEATHER_INFO);
        }

        if (TextUtils.isEmpty(weatherInfo)) {
            // No cached text: fetch it; the FAB is wired up in the callback.
            setActivityTitle(getString(R.string.loading_message));
            DownloadingUtil.getWeather(locationName, new HandleWeatherResponse(this));
        } else {
            setupFavButtonAction();
            currentWeatherInfo.setText(weatherInfo);
            setActivityTitle(locationName);
        }
    }

    // Queries Realm for a FavoriteLocation matching locationName.
    // NOTE(review): assumes names are unique — only size()==1 is treated as
    // favorite; confirm against how FavoriteLocation rows are created.
    private void checkIsLocationSavedAsFavourite() {
        RealmResults<FavoriteLocation> favoriteLocationRealmResult =
                getRealmInstance().where(FavoriteLocation.class)
                        .equalTo("name", locationName)
                        .findAll();
        if (favoriteLocationRealmResult.size() == 1) {
            isLocationFavorite = true;
            favoriteLocation = favoriteLocationRealmResult.get(0);
        }
    }

    // FAB click: toggle persistence first, then flip the flag and update UI.
    private void setupFavButtonAction() {
        floatingActionButton.setOnClickListener(view -> {
            addOrRemoveLocationFromFavorites();
            isLocationFavorite = !isLocationFavorite;
            setFavButtonIcon();
            showSnackBar();
        });
    }

    // Persists or removes the location inside a single Realm transaction.
    private void addOrRemoveLocationFromFavorites() {
        getRealmInstance().beginTransaction();
        if (isLocationFavorite) {
            if (favoriteLocation != null) {
                favoriteLocation.deleteFromRealm();
            }
        } else {
            FavoriteLocation newFavoriteLocation = new FavoriteLocation(locationName);
            getRealmInstance().copyToRealm(newFavoriteLocation);
            favoriteLocation = newFavoriteLocation;
        }
        getRealmInstance().commitTransaction();
    }

    // Reflects the current favorite flag in the FAB icon.
    private void setFavButtonIcon() {
        if (isLocationFavorite) {
            floatingActionButton.setImageResource(R.drawable.ic_action_favorited);
        } else {
            floatingActionButton.setImageResource(R.drawable.ic_action_not_favorited);
        }
    }

    // Confirms the favorite toggle to the user.
    private void showSnackBar() {
        String message;
        if (isLocationFavorite) {
            message = getString(R.string.activity_details_location_saved_as_favourite);
        } else {
            message = getString(R.string.activity_details_location_removed_from_favourite);
        }
        Snackbar.make(floatingActionButton, message, Snackbar.LENGTH_LONG).show();
    }

    // Fetches forecast data and feeds it into the chart. The chart is made
    // visible on failure too, matching the original behavior.
    private void getForecastData() {
        DownloadingUtil.getForecastWeather(locationName,
                new Callback<WeatherForecastResponse>() {
                    @Override
                    public void onResponse(Call<WeatherForecastResponse> call,
                                           Response<WeatherForecastResponse> response) {
                        chart.setVisibility(View.VISIBLE);
                        if (response.isSuccessful()) {
                            chart.setForecastDataToChart(response);
                        }
                    }

                    @Override
                    public void onFailure(Call<WeatherForecastResponse> call, Throwable t) {
                        chart.setVisibility(View.VISIBLE);
                    }
                });
    }

    // Updates the collapsing toolbar title.
    private void setActivityTitle(String title) {
        collapsingToolbarLayout.setTitle(title);
    }
}
package org.appng.maven.plugin.mojo; import java.io.File; import java.util.Arrays; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Parameter; import org.sonatype.plexus.build.incremental.BuildContext; abstract class AbstractGeneratorMojo extends AbstractMojo { /** * the path to the source file */ @Parameter(property = "filePath", required = true) protected File filePath; /** * the fully qualified name of the target class to generate */ @Parameter(property = "targetClass", required = true) protected String targetClass; /** * the output-folder for the generated class */ @Parameter(property = "outFolder", defaultValue = "target/generated-sources/constants", required = false) protected File outfolder; /** * skips the execution */ @Parameter(property = "skip", defaultValue = "false", required = false) protected boolean skip; @Component protected BuildContext buildContext; @Override public void execute() throws MojoExecutionException, MojoFailureException { if (skip) { getLog().info("skipping " + getMessage()); return; } if (needsToBeExecuted()) { try { getLog().info(getMessage() + " for " + Arrays.toString(getArgs())); createConstantClass(); buildContext.refresh(outfolder.getAbsoluteFile()); } catch (Exception e) { buildContext.addMessage(filePath, 0, 0, "unable to " + getMessage(), BuildContext.SEVERITY_ERROR, e); throw new MojoExecutionException("unable to " + getMessage(), e); } } else { getLog().debug("no creation needed: " + getMessage()); } getLog().debug("delta: " + buildContext.hasDelta(filePath)); getLog().debug("incremental: " + buildContext.isIncremental()); } protected abstract void createConstantClass() throws Exception; protected abstract String getMessage(); protected abstract String[] getArgs(); protected boolean needsToBeExecuted() { return 
buildContext.hasDelta(filePath); } }
package ru.trylogic.spring.boot.thrift;

import io.micrometer.core.instrument.MeterRegistry;
import lombok.Getter;
import lombok.Setter;
import lombok.SneakyThrows;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.server.TServlet;
import org.springframework.aop.framework.ProxyFactory;
import org.springframework.aop.target.SingletonTargetSource;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.boot.web.servlet.RegistrationBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.ClassUtils;
import ru.trylogic.spring.boot.thrift.annotation.ThriftController;
import ru.trylogic.spring.boot.thrift.aop.LoggingThriftMethodInterceptor;
import ru.trylogic.spring.boot.thrift.aop.MetricsThriftMethodInterceptor;

import javax.servlet.ServletContext;
import javax.servlet.ServletRegistration;
import java.lang.reflect.Constructor;

/**
 * Auto-configuration that exposes every {@code @ThriftController}-annotated bean
 * as an HTTP {@link TServlet} speaking the configured Thrift protocol.
 */
@Configuration
@ConditionalOnClass({ThriftController.class})
@ConditionalOnWebApplication
public class ThriftAutoConfiguration {

    /** Extension point: customise the AOP proxy wrapped around each handler. */
    public interface ThriftConfigurer {
        void configureProxyFactory(ProxyFactory proxyFactory);
    }

    // Fallback configurer, used only when the application does not define one.
    @Bean
    @ConditionalOnMissingBean(ThriftConfigurer.class)
    ThriftConfigurer thriftConfigurer() {
        return new DefaultThriftConfigurer();
    }

    // Binary protocol is the default wire format unless overridden.
    @Bean
    @ConditionalOnMissingBean(TProtocolFactory.class)
    TProtocolFactory thriftProtocolFactory() {
        return new TBinaryProtocol.Factory();
    }

    @Bean
    @ConditionalOnMissingBean(LoggingThriftMethodInterceptor.class)
    LoggingThriftMethodInterceptor loggingThriftMethodInterceptor() {
        return new LoggingThriftMethodInterceptor();
    }

    /**
     * Default proxy customisation: adds metrics advice (only when a
     * {@link MeterRegistry} bean exists) followed by logging advice.
     */
    public static class DefaultThriftConfigurer implements ThriftConfigurer {

        // Optional dependency: metrics are skipped when no registry is on the classpath/context.
        @Autowired(required = false)
        private MeterRegistry meterRegistry;

        @Autowired
        private LoggingThriftMethodInterceptor loggingThriftMethodInterceptor;

        public void configureProxyFactory(ProxyFactory proxyFactory) {
            if (meterRegistry != null) {
                proxyFactory.addAdvice(new MetricsThriftMethodInterceptor(meterRegistry));
            }
            proxyFactory.addAdvice(loggingThriftMethodInterceptor);
        }
    }

    /**
     * Registers one servlet per {@code @ThriftController} bean at servlet-context
     * startup, discovering the Thrift {@code Iface}/{@code Processor} pair by
     * reflection over the handler's interfaces.
     */
    @Configuration
    public static class Registrar extends RegistrationBean implements ApplicationContextAware {

        @Getter
        @Setter
        private ApplicationContext applicationContext;

        @Autowired
        private TProtocolFactory protocolFactory;

        @Autowired
        private ThriftConfigurer thriftConfigurer;

        @Override
        protected String getDescription() {
            return "Thrift services";
        }

        @Override
        @SneakyThrows({NoSuchMethodException.class, ClassNotFoundException.class, InstantiationException.class, IllegalAccessException.class})
        protected void register(String description, ServletContext servletContext) {
            // One servlet per annotated bean; URL mapping comes from the annotation value.
            for (String beanName : applicationContext.getBeanNamesForAnnotation(ThriftController.class)) {
                ThriftController annotation = applicationContext.findAnnotationOnBean(beanName, ThriftController.class);
                register(servletContext, annotation.value(), protocolFactory.getClass(), applicationContext.getBean(beanName));
            }
        }

        // Raw Class locals below are deliberate: the Thrift-generated Iface/Processor
        // types are only known at runtime, so there is no useful static type argument.
        protected void register(ServletContext servletContext, String[] urls, Class<? extends TProtocolFactory> factory, Object handler) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InstantiationException {
            Class<?>[] handlerInterfaces = ClassUtils.getAllInterfaces(handler);

            Class ifaceClass = null;
            Class<TProcessor> processorClass = null;
            Class serviceClass = null;

            // Thrift codegen convention: the service class declares a nested
            // "$Iface" interface and a nested "$Processor" implementing TProcessor.
            for (Class<?> handlerInterfaceClass : handlerInterfaces) {
                if (!handlerInterfaceClass.getName().endsWith("$Iface")) {
                    continue;
                }

                serviceClass = handlerInterfaceClass.getDeclaringClass();
                if (serviceClass == null) {
                    continue;
                }

                for (Class<?> innerClass : serviceClass.getDeclaredClasses()) {
                    if (!innerClass.getName().endsWith("$Processor")) {
                        continue;
                    }

                    if (!TProcessor.class.isAssignableFrom(innerClass)) {
                        continue;
                    }

                    // A handler implementing two distinct Ifaces is ambiguous —
                    // reject rather than pick one arbitrarily.
                    if (ifaceClass != null) {
                        throw new IllegalStateException("Multiple Thrift Ifaces defined on handler");
                    }

                    ifaceClass = handlerInterfaceClass;
                    processorClass = (Class<TProcessor>) innerClass;
                    break;
                }
            }

            if (ifaceClass == null) {
                throw new IllegalStateException("No Thrift Ifaces found on handler");
            }

            // Wrap the handler in an AOP proxy (logging/metrics) before binding it
            // into the generated Processor.
            handler = wrapHandler(ifaceClass, handler);

            Constructor<TProcessor> processorConstructor = processorClass.getConstructor(ifaceClass);

            TProcessor processor = BeanUtils.instantiateClass(processorConstructor, handler);

            TServlet servlet;
            // If the annotation did not override the protocol factory (i.e. it is
            // the base TProtocolFactory type), reuse the context-provided instance;
            // otherwise instantiate the requested factory class.
            if (TProtocolFactory.class.equals(factory)) {
                servlet = getServlet(processor, protocolFactory);
            } else {
                servlet = getServlet(processor, factory.newInstance());
            }

            String servletBeanName = handler.getClass().getSimpleName() + "Servlet";

            ServletRegistration.Dynamic registration = servletContext.addServlet(servletBeanName, servlet);

            // Fall back to "/<ServiceName>" when no explicit URL was given.
            if (urls != null && urls.length > 0) {
                registration.addMapping(urls);
            } else {
                registration.addMapping("/" + serviceClass.getSimpleName());
            }
        }

        protected TServlet getServlet(TProcessor processor, TProtocolFactory protocolFactory) {
            return new TServlet(processor, protocolFactory);
        }

        // Builds the interceptor-decorated proxy around the raw handler.
        protected <T> T wrapHandler(Class<T> interfaceClass, T handler) {
            ProxyFactory proxyFactory = new ProxyFactory(interfaceClass, new SingletonTargetSource(handler));

            thriftConfigurer.configureProxyFactory(proxyFactory);

            //TODO remove from here?
            proxyFactory.setFrozen(true);
            return (T) proxyFactory.getProxy();
        }
    }
}
package org.jasig.cas.config; import org.jasig.cas.services.ServicesManager; import org.jasig.cas.services.web.RegisteredServiceThemeBasedViewResolver; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; import org.springframework.core.io.Resource; import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean; import org.springframework.web.servlet.i18n.CookieLocaleResolver; import org.springframework.web.servlet.i18n.LocaleChangeInterceptor; import org.springframework.web.servlet.mvc.SimpleControllerHandlerAdapter; import org.springframework.web.servlet.theme.ThemeChangeInterceptor; import org.springframework.web.servlet.view.AbstractCachingViewResolver; import org.springframework.web.servlet.view.BeanNameViewResolver; import org.springframework.web.servlet.view.InternalResourceView; import org.springframework.web.servlet.view.UrlBasedViewResolver; import org.springframework.web.servlet.view.XmlViewResolver; import org.springframework.web.servlet.view.script.ScriptTemplateViewResolver; import javax.validation.MessageInterpolator; import java.util.Locale; /** * This is {@link CasWebAppConfiguration}. * * @author Misagh Moayyed * @since 4.3.0 */ @Configuration("casWebAppConfiguration") @Lazy(true) public class CasWebAppConfiguration { /** * The constant URL_VIEW_RESOLVER_ORDER. */ private static final int URL_VIEW_RESOLVER_ORDER = 2000; /** * The Message interpolator. */ @Autowired @Qualifier("messageInterpolator") private MessageInterpolator messageInterpolator; /** * The Theme param name. */ @Value("${cas.themeResolver.param.name:theme}") private String themeParamName; /** * The Path prefix. 
*/ @Value("${cas.themeResolver.pathprefix:/WEB-INF/view/jsp}") private String pathPrefix; /** * The Xml views file. */ @Value("${cas.viewResolver.xmlFile:classpath:/META-INF/spring/views.xml}") private Resource xmlViewsFile; /** * The Services manager. */ @Autowired @Qualifier("servicesManager") private ServicesManager servicesManager; /** * The Default locale. */ @Value("${locale.default:en}") private Locale defaultLocale; /** * The Locale param name. */ @Value("${locale.param.name:locale}") private String localeParamName; /** * Credentials validator local validator factory bean. * * @return the local validator factory bean */ @Bean(name = "credentialsValidator") public LocalValidatorFactoryBean credentialsValidator() { final LocalValidatorFactoryBean bean = new LocalValidatorFactoryBean(); bean.setMessageInterpolator(this.messageInterpolator); return bean; } /** * Theme change interceptor theme change interceptor. * * @return the theme change interceptor */ @Bean(name = "themeChangeInterceptor") public ThemeChangeInterceptor themeChangeInterceptor() { final ThemeChangeInterceptor bean = new ThemeChangeInterceptor(); bean.setParamName(this.themeParamName); return bean; } /** * Bean name view resolver bean name view resolver. * * @return the bean name view resolver */ @Bean(name = "beanNameViewResolver") public BeanNameViewResolver beanNameViewResolver() { final BeanNameViewResolver bean = new BeanNameViewResolver(); bean.setOrder(1); return bean; } /** * Xml view resolver abstract caching view resolver. 
* * @return the abstract caching view resolver */ @Bean(name = "xmlViewResolver") public AbstractCachingViewResolver xmlViewResolver() { if (xmlViewsFile.exists()) { final XmlViewResolver bean = new XmlViewResolver(); bean.setOrder(URL_VIEW_RESOLVER_ORDER - 1); bean.setLocation(xmlViewsFile); return bean; } final ScriptTemplateViewResolver bean = new ScriptTemplateViewResolver(); bean.setOrder(URL_VIEW_RESOLVER_ORDER - 1); return bean; } /** * Url based view resolver url based view resolver. * * @return the url based view resolver */ @Bean(name = "urlBasedViewResolver") public UrlBasedViewResolver urlBasedViewResolver() { final UrlBasedViewResolver bean = new UrlBasedViewResolver(); bean.setViewClass(InternalResourceView.class); bean.setPrefix(this.pathPrefix); bean.setSuffix(".jsp"); bean.setOrder(URL_VIEW_RESOLVER_ORDER); return bean; } /** * Internal view resolver registered service theme based view resolver. * * @return the registered service theme based view resolver */ @Bean(name = "internalViewResolver") public RegisteredServiceThemeBasedViewResolver internalViewResolver() { final RegisteredServiceThemeBasedViewResolver bean = new RegisteredServiceThemeBasedViewResolver(this.servicesManager); bean.setPrefix(this.pathPrefix); bean.setOrder(URL_VIEW_RESOLVER_ORDER + 1); return bean; } /** * Locale resolver cookie locale resolver. * * @return the cookie locale resolver */ @Bean(name = "localeResolver") public CookieLocaleResolver localeResolver() { final CookieLocaleResolver bean = new CookieLocaleResolver(); bean.setDefaultLocale(this.defaultLocale); return bean; } /** * Locale change interceptor locale change interceptor. * * @return the locale change interceptor */ @Bean(name = "localeChangeInterceptor") public LocaleChangeInterceptor localeChangeInterceptor() { final LocaleChangeInterceptor bean = new LocaleChangeInterceptor(); bean.setParamName(this.localeParamName); return bean; } /** * Simple controller handler adapter. 
* * @return the simple controller handler adapter */ @Bean(name = "simpleControllerHandlerAdapter") public SimpleControllerHandlerAdapter simpleControllerHandlerAdapter() { return new SimpleControllerHandlerAdapter(); } }
package org.jnosql.diana.cassandra.column;

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PagingState;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import org.jnosql.diana.api.column.ColumnEntity;
import org.jnosql.diana.api.column.ColumnQuery;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;

import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;

/**
 * Query execution strategies: PAGING_STATE resumes from a saved Cassandra
 * paging state and fetches one page at a time; DEFAULT drains the whole
 * result set in one pass.
 */
enum QueryExecutorType implements QueryExecutor {

    PAGING_STATE {
        @Override
        public List<ColumnEntity> execute(String keyspace, ColumnQuery query, DefaultCassandraColumnFamilyManager manager) {
            // No consistency level requested — delegate with null.
            return execute(keyspace, query, null, manager);
        }

        @Override
        public List<ColumnEntity> execute(String keyspace, ColumnQuery q, ConsistencyLevel level, DefaultCassandraColumnFamilyManager manager) {
            // This strategy requires the Cassandra-specific query subtype that
            // carries paging state between calls.
            CassandraQuery query = CassandraQuery.class.cast(q);

            // Previously drained to the end — nothing more to fetch.
            if (query.isExhausted()) {
                return emptyList();
            }
            BuiltStatement select = QueryUtils.select(query, keyspace);
            if (Objects.nonNull(level)) {
                select.setConsistencyLevel(level);
            }

            // Resume from the saved page, if any. NOTE(review): "toPatingState"
            // is a typo ("Paging") in the CassandraQuery API — cannot be fixed here.
            query.toPatingState().ifPresent(select::setPagingState);
            ResultSet resultSet = manager.getSession().execute(select);
            PagingState pagingState = resultSet.getExecutionInfo().getPagingState();
            // Save the new paging state so the next call continues where we stopped.
            query.setPagingState(pagingState);
            List<ColumnEntity> entities = new ArrayList<>();
            // Consume only the rows already fetched for this page; stop (and
            // record exhaustion) once the driver would have to fetch more.
            for (Row row : resultSet) {
                entities.add(CassandraConverter.toDocumentEntity(row));
                if (resultSet.getAvailableWithoutFetching() == 0) {
                    query.setExhausted(resultSet.isExhausted());
                    break;
                }
            }
            return entities;
        }

        @Override
        public void execute(String keyspace, ColumnQuery query, Consumer<List<ColumnEntity>> consumer, DefaultCassandraColumnFamilyManagerAsync manager) {
            execute(keyspace, query, null, consumer, manager);
        }

        @Override
        public void execute(String keyspace, ColumnQuery q, ConsistencyLevel level, Consumer<List<ColumnEntity>> consumer, DefaultCassandraColumnFamilyManagerAsync manager) {
            CassandraQuery query = CassandraQuery.class.cast(q);

            if (query.isExhausted()) {
                consumer.accept(emptyList());
                return;
            }
            BuiltStatement select = QueryUtils.select(query, keyspace);
            if (Objects.nonNull(level)) {
                select.setConsistencyLevel(level);
            }
            query.toPatingState().ifPresent(select::setPagingState);
            // Async variant: paging-state bookkeeping happens in the listener.
            ResultSetFuture resultSet = manager.getSession().executeAsync(select);
            Runnable executeAsync = new CassandraReturnQueryPagingStateAsync(resultSet, consumer, query);
            resultSet.addListener(executeAsync, manager.getExecutor());
        }
    },

    DEFAULT {
        @Override
        public List<ColumnEntity> execute(String keyspace, ColumnQuery query, DefaultCassandraColumnFamilyManager manager) {
            return execute(keyspace, query, null, manager);
        }

        @Override
        public List<ColumnEntity> execute(String keyspace, ColumnQuery query, ConsistencyLevel level, DefaultCassandraColumnFamilyManager manager) {
            BuiltStatement select = QueryUtils.select(query, keyspace);
            if (Objects.nonNull(level)) {
                select.setConsistencyLevel(level);
            }
            // Drain everything — no paging bookkeeping in this strategy.
            ResultSet resultSet = manager.getSession().execute(select);
            return resultSet.all().stream().map(CassandraConverter::toDocumentEntity)
                    .collect(toList());
        }

        @Override
        public void execute(String keyspace, ColumnQuery query, Consumer<List<ColumnEntity>> consumer, DefaultCassandraColumnFamilyManagerAsync manager) {
            execute(keyspace, query, null, consumer, manager);
        }

        @Override
        public void execute(String keyspace, ColumnQuery query, ConsistencyLevel level, Consumer<List<ColumnEntity>> consumer, DefaultCassandraColumnFamilyManagerAsync manager) {
            BuiltStatement select = QueryUtils.select(query, keyspace);
            if (Objects.nonNull(level)) {
                select.setConsistencyLevel(level);
            }
            ResultSetFuture resultSet = manager.getSession().executeAsync(select);
            Runnable executeAsync = new CassandraReturnQueryAsync(resultSet, consumer);
            resultSet.addListener(executeAsync, manager.getExecutor());
        }
    };
}
package io.cattle.platform.iaas.api.auth.identity;

import io.cattle.platform.api.auth.Identity;
import io.cattle.platform.iaas.api.auth.SecurityConstants;
import io.cattle.platform.iaas.api.auth.TokenUtils;
import io.cattle.platform.iaas.api.auth.integration.github.GithubConstants;
import io.github.ibuildthecloud.gdapi.annotation.Field;
import io.github.ibuildthecloud.gdapi.annotation.Type;

import java.util.List;

/**
 * API resource describing an authentication token: the signed JWT, the
 * authenticated user's identities, and provider-specific settings (GitHub
 * client id / hostname are only exposed when GitHub is the active provider).
 */
@Type(name = TokenUtils.TOKEN)
public class Token {

    private String jwt;
    private String code;
    private String user;
    private Boolean security = SecurityConstants.SECURITY.get();
    private String userType;
    private String authProvider = SecurityConstants.AUTH_PROVIDER.get();
    private String accountId;
    private Identity userIdentity;
    // FIX: the previous "enabled = security" unboxed a nullable Boolean and
    // would NPE during construction if SECURITY.get() ever returned null;
    // null now maps to false.
    private boolean enabled = Boolean.TRUE.equals(security);
    private List<Identity> identities;

    /**
     * Full constructor used after a successful authentication.
     *
     * @param jwt          the signed token
     * @param accountId    owning account id
     * @param userIdentity primary identity; its login becomes {@code user}
     * @param identities   all identities attached to the token
     * @param userType     the type of the authenticated user
     */
    public Token(String jwt, String accountId, Identity userIdentity, List<Identity> identities, String userType) {
        this.jwt = jwt;
        this.userIdentity = userIdentity;
        this.accountId = accountId;
        this.identities = identities;
        this.user = userIdentity.getLogin();
        this.userType = userType;
    }

    /** No-arg constructor for the API framework. */
    public Token() {
    }

    @Field(nullable = true)
    public String getJwt() {
        return jwt;
    }

    @Field(nullable = true)
    public void setCode(String code) {
        this.code = code;
    }

    @Field(nullable = true)
    public String getUser() {
        return user;
    }

    @Field(nullable = true)
    public Boolean getSecurity() {
        return security;
    }

    /** GitHub client id, or null when GitHub is not the active auth provider. */
    @Field(nullable = true)
    public String getClientId() {
        return GithubConstants.CONFIG.equalsIgnoreCase(SecurityConstants.AUTH_PROVIDER.get()) ?
                GithubConstants.GITHUB_CLIENT_ID.get() : null;
    }

    @Field(nullable = true)
    public String getUserType() {
        return userType;
    }

    @Field(nullable = true)
    public String getAccountId() {
        return accountId;
    }

    /** GitHub hostname, or null when GitHub is not the active auth provider. */
    @Field(nullable = true)
    public String getHostname() {
        return GithubConstants.CONFIG.equalsIgnoreCase(SecurityConstants.AUTH_PROVIDER.get()) ?
                GithubConstants.GITHUB_HOSTNAME.get() : null;
    }

    @Field(nullable = true, required = true)
    public String getCode() {
        return code;
    }

    @Field(nullable = true)
    public Identity getUserIdentity() {
        return userIdentity;
    }

    public void setUserIdentity(Identity user) {
        this.userIdentity = user;
    }

    @Field(nullable = true)
    public boolean isEnabled() {
        return enabled;
    }

    @Field(nullable = true)
    public Identity[] getIdentities() {
        // FIX: the no-arg constructor leaves identities null; previously this
        // threw NullPointerException. Return an empty array instead.
        if (identities == null) {
            return new Identity[0];
        }
        return identities.toArray(new Identity[identities.size()]);
    }

    public void setIdentities(List<Identity> identities) {
        this.identities = identities;
    }

    @Field(nullable = true)
    public String getAuthProvider() {
        return authProvider;
    }

    public void setJwt(String jwt) {
        this.jwt = jwt;
    }
}
package edu.duke.cabig.c3pr.utils;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

import edu.duke.cabig.c3pr.constants.CoordinatingCenterStudyStatus;
import edu.duke.cabig.c3pr.constants.OrganizationIdentifierTypeEnum;
import edu.duke.cabig.c3pr.constants.RandomizationType;
import edu.duke.cabig.c3pr.constants.StudyDataEntryStatus;
import edu.duke.cabig.c3pr.domain.Arm;
import edu.duke.cabig.c3pr.domain.BookRandomization;
import edu.duke.cabig.c3pr.domain.BookRandomizationEntry;
import edu.duke.cabig.c3pr.domain.CompanionStudyAssociation;
import edu.duke.cabig.c3pr.domain.Consent;
import edu.duke.cabig.c3pr.domain.EligibilityCriteria;
import edu.duke.cabig.c3pr.domain.Epoch;
import edu.duke.cabig.c3pr.domain.HealthcareSite;
import edu.duke.cabig.c3pr.domain.InclusionEligibilityCriteria;
import edu.duke.cabig.c3pr.domain.LocalHealthcareSite;
import edu.duke.cabig.c3pr.domain.OrganizationAssignedIdentifier;
import edu.duke.cabig.c3pr.domain.PhoneCallRandomization;
import edu.duke.cabig.c3pr.domain.Randomization;
import edu.duke.cabig.c3pr.domain.StratificationCriterion;
import edu.duke.cabig.c3pr.domain.StratificationCriterionAnswerCombination;
import edu.duke.cabig.c3pr.domain.StratificationCriterionPermissibleAnswer;
import edu.duke.cabig.c3pr.domain.StratumGroup;
import edu.duke.cabig.c3pr.domain.Study;
import edu.duke.cabig.c3pr.domain.StudyCoordinatingCenter;
import edu.duke.cabig.c3pr.domain.StudySite;

/**
 * Test-fixture factory for {@link Study} object graphs in various
 * configurations (multi-site/local, randomized/non-randomized, with/without
 * arms, stratum groups and consents).
 *
 * <p>Changes vs. previous revision: deprecated {@code new Boolean(...)}
 * replaced with {@code Boolean.TRUE}/{@code Boolean.FALSE}, and raw
 * {@code ArrayList} locals given proper type arguments. Behavior is unchanged.
 */
public class StudyCreationHelper {

    /** Multi-site study with one randomized treatment epoch containing arm "A". */
    public Study getMultiSiteRandomizedStudy(RandomizationType randomizationType) throws Exception {
        Study study = buildBasicStudy(true, randomizationType);
        Epoch epoch = getTreatmentEpochWithArm();
        addRandomization(randomizationType, epoch);
        study.addEpoch(epoch);
        return study;
    }

    /** Multi-site study with one "screening" epoch and the given indicators. */
    public Study getMultiSiteNonRandomizedStudy(Boolean reserving, Boolean enrolling) {
        Study study = buildBasicStudy(true, null);
        Epoch epoch = new Epoch();
        epoch.setName("screening");
        epoch.setReservationIndicator(reserving);
        epoch.setEnrollmentIndicator(enrolling);
        study.addEpoch(epoch);
        return study;
    }

    /** Multi-site study with a (non-randomized) treatment epoch containing arm "A". */
    public Study getMultiSiteNonRandomizedWithArmStudy() {
        Study study = buildBasicStudy(true, null);
        Epoch epoch = getTreatmentEpochWithArm();
        study.addEpoch(epoch);
        return study;
    }

    /** Local randomized study with a treatment epoch, randomization and a default consent. */
    public Study getLocalRandomizedStudy(RandomizationType randomizationType) throws Exception {
        Study study = buildBasicStudy(false, randomizationType);
        Epoch epoch = getTreatmentEpochWithArm();
        addRandomization(randomizationType, epoch);
        study.addEpoch(epoch);
        addDefaultConsentToStudy(study);
        return study;
    }

    /** Adds a consent named "default consent" to the study's current version. */
    public Study addDefaultConsentToStudy(Study study) {
        Consent consent = new Consent();
        consent.setName("default consent");
        study.getStudyVersion().addConsent(consent);
        return study;
    }

    /** Local study: first epoch randomized (order 1), second non-randomized (order 2). */
    public Study getLocalStudyWith1stEpochRandomized2ndNonRandomized(RandomizationType randomizationType) throws Exception {
        Study study = buildBasicStudy(false, randomizationType);
        Epoch epoch = getTreatmentEpochWithArm();
        addRandomization(randomizationType, epoch);
        epoch.setEpochOrder(1);
        study.addEpoch(epoch);
        Epoch epoch1 = getNonRandomizedNonStratifiedEnrollingEpochWithoutArm();
        epoch1.setEpochOrder(2);
        study.addEpoch(epoch1);
        return study;
    }

    /** Local study with one "screening" epoch and the given indicators. */
    public Study getLocalNonRandomizedStudy(Boolean reserving, Boolean enrolling) {
        Study study = buildBasicStudy(false, null);
        Epoch epoch = new Epoch();
        epoch.setName("screening");
        epoch.setReservationIndicator(reserving);
        epoch.setEnrollmentIndicator(enrolling);
        study.addEpoch(epoch);
        return study;
    }

    // NOTE: "Tratment" typo is part of the public method name; kept for callers.
    public Study getLocalNonRandomizedTratmentWithArmStudy() {
        Study study = buildBasicStudy(false, null);
        Epoch epoch = getTreatmentEpochWithArm();
        epoch.setRandomizedIndicator(false);
        study.addEpoch(epoch);
        return study;
    }

    public Study getLocalNonRandomizedTratmentWithoutArmStudy() {
        Study study = buildBasicStudy(false, null);
        Epoch epoch = new Epoch();
        epoch.setName("epoch1");
        study.addEpoch(epoch);
        epoch.setEnrollmentIndicator(true);
        return study;
    }

    /** Enrolling, non-randomized epoch "epoch1" containing a single arm "A". */
    private Epoch getTreatmentEpochWithArm() {
        Arm armA = new Arm();
        armA.setName("A");
        Epoch epoch = new Epoch();
        ArrayList<Arm> aList = new ArrayList<Arm>();
        aList.add(armA);
        epoch.getArms().addAll(aList);
        epoch.setName("epoch1");
        epoch.setRandomizedIndicator(false);
        epoch.setEnrollmentIndicator(true);
        return epoch;
    }

    /** Enrolling, non-randomized, arm-less epoch "epoch2". */
    private Epoch getNonRandomizedNonStratifiedEnrollingEpochWithoutArm() {
        Epoch epoch = new Epoch();
        epoch.setName("epoch2");
        epoch.setRandomizedIndicator(false);
        epoch.setEnrollmentIndicator(true);
        return epoch;
    }

    /** Builds a one-element list with a stratum group (position 1, number 2). */
    private ArrayList<StratumGroup> buildStratumGroupWithScac() {
        StratificationCriterionAnswerCombination scac = new StratificationCriterionAnswerCombination();
        List<StratificationCriterionAnswerCombination> scacList = new ArrayList<StratificationCriterionAnswerCombination>();
        scacList.add(scac);
        StratumGroup stratumGroup = new StratumGroup();
        stratumGroup.getStratificationCriterionAnswerCombination().addAll(scacList);
        stratumGroup.setCurrentPosition(1);
        stratumGroup.setStratumGroupNumber(2);
        ArrayList<StratumGroup> sgList = new ArrayList<StratumGroup>();
        sgList.add(stratumGroup);
        return sgList;
    }

    /**
     * Attaches one stratification criterion (two permissible answers) and one
     * stratum group to the given epoch.
     */
    private void addStratumGroupToEpoch(Epoch epoch1) {
        StratificationCriterion sc = new StratificationCriterion();
        sc.setQuestionText("will I work?");
        StratificationCriterionPermissibleAnswer scpa1 = new StratificationCriterionPermissibleAnswer();
        scpa1.setPermissibleAnswer("lets find out1");
        StratificationCriterionPermissibleAnswer scpa2 = new StratificationCriterionPermissibleAnswer();
        scpa2.setPermissibleAnswer("lets find out2");
        // Typed lists replace the previous raw ArrayList usage.
        List<StratificationCriterionPermissibleAnswer> scpaList = new ArrayList<StratificationCriterionPermissibleAnswer>();
        scpaList.add(scpa1);
        scpaList.add(scpa2);
        sc.getPermissibleAnswers().addAll(scpaList);
        List<StratificationCriterion> scList = new ArrayList<StratificationCriterion>();
        scList.add(sc);
        epoch1.getStratificationCriteria().addAll(scList);

        StratificationCriterionAnswerCombination scac1 = new StratificationCriterionAnswerCombination();
        scac1.setStratificationCriterion(sc);
        scac1.setStratificationCriterionPermissibleAnswer(scpa1);
        StratificationCriterionAnswerCombination scac2 = new StratificationCriterionAnswerCombination();
        scac2.setStratificationCriterion(sc);
        scac2.setStratificationCriterionPermissibleAnswer(scpa2);
        // NOTE(review): scac2 is built but only scac1 is added to the group —
        // preserved as-is; confirm this asymmetry is intentional.
        List<StratificationCriterionAnswerCombination> scacList = new ArrayList<StratificationCriterionAnswerCombination>();
        scacList.add(scac1);
        StratumGroup stratumGroup = new StratumGroup();
        stratumGroup.getStratificationCriterionAnswerCombination().addAll(scacList);
        stratumGroup.setCurrentPosition(0);
        stratumGroup.setStratumGroupNumber(0);
        ArrayList<StratumGroup> sgList = new ArrayList<StratumGroup>();
        sgList.add(stratumGroup);
        epoch1.getStratumGroups().addAll(sgList);
    }

    /**
     * Builds the common study skeleton; randomization fields are only set when
     * a randomization type is supplied.
     */
    public Study buildBasicStudy(Boolean multiSite, RandomizationType randomizationType) {
        Study study = new Study();
        study.setPrecisText("Study with randomization");
        study.setShortTitleText("ShortTitleText1");
        study.setLongTitleText("LongTitleText1");
        study.setPhaseCode("Phase I Trial");
        study.setStratificationIndicator(true);
        study.setCoordinatingCenterStudyStatus(CoordinatingCenterStudyStatus.OPEN);
        study.setDataEntryStatus(StudyDataEntryStatus.COMPLETE);
        study.setTargetAccrualNumber(150);
        study.setType("Diagnostic");
        study.setDescriptionText("Description Text");
        study.setOriginalIndicator(true);
        study.setMultiInstitutionIndicator(multiSite);
        if (randomizationType != null) {
            study.setRandomizedIndicator(Boolean.TRUE);
            study.setRandomizationType(randomizationType);
        } else {
            study.setRandomizedIndicator(Boolean.FALSE);
        }
        return study;
    }

    /** Marks the epoch randomized and attaches the requested randomization kind. */
    private void addRandomization(RandomizationType randomizationType, Epoch epoch) throws Exception {
        epoch.setRandomizedIndicator(true);
        if (randomizationType == RandomizationType.BOOK)
            addBookRandomization(epoch);
        else if (randomizationType == RandomizationType.PHONE_CALL)
            addPhoneCallRandomization(epoch);
        else {
            throw new Exception("Invalid Randomization Type");
        }
    }

    /** Book randomization with one entry bound to arm "A" and a fresh stratum group. */
    private void addBookRandomization(Epoch epoch) {
        BookRandomization bRandomization = new BookRandomization();
        BookRandomizationEntry bre = new BookRandomizationEntry();
        bre.setPosition(0);
        if (epoch != null) {
            List<Arm> armList = epoch.getArms();
            for (Arm individualArm : armList) {
                if (individualArm.getName().equals("A")) {
                    bre.setArm(individualArm);
                }
            }
        }
        addStratumGroupToEpoch(epoch);
        bre.setStratumGroup(epoch.getStratumGroups().get(0));
        epoch.getStratumGroups().get(0).getBookRandomizationEntry().add(bre);
        List<BookRandomizationEntry> breList = new ArrayList<BookRandomizationEntry>();
        breList.add(bre);
        bRandomization.getBookRandomizationEntry().addAll(breList);
        epoch.setRandomization(bRandomization);
    }

    /** Phone-call randomization with a fixed test phone number. */
    private void addPhoneCallRandomization(Epoch epoch) {
        Randomization pRandomization = new PhoneCallRandomization();
        ((PhoneCallRandomization) pRandomization).setPhoneNumber("777 777 7777");
        epoch.setRandomization(pRandomization);
    }

    /** Pending, multi-institution study skeleton with a named study version. */
    public Study createBasicStudy() {
        Study study = new Study();
        study.setPrecisText("New study");
        study.setShortTitleText("ShortTitleText");
        study.setLongTitleText("LongTitleText");
        study.setPhaseCode("PhaseCode");
        // Boolean.FALSE replaces deprecated "new Boolean(false)".
        study.setRandomizedIndicator(Boolean.FALSE);
        study.setStratificationIndicator(false);
        study.setCoordinatingCenterStudyStatus(CoordinatingCenterStudyStatus.PENDING);
        study.setDataEntryStatus(StudyDataEntryStatus.INCOMPLETE);
        study.setTargetAccrualNumber(150);
        study.setType("Type");
        study.setMultiInstitutionIndicator(Boolean.TRUE);
        // Kept: stratification indicator is (redundantly) set twice, as before.
        study.setStratificationIndicator(Boolean.FALSE);
        study.setOriginalIndicator(true);
        study.getStudyVersion().setName("name");
        study.getStudyVersion().setVersionDate(new Date());
        return study;
    }

    public Study addStudySiteAndEnrollingEpochToBasicStudy(Study study) {
        return addStudySiteAndEnrollingEpochToBasicStudy(study, "Name");
    }

    /** Adds a local-site study site and an enrolling epoch with one inclusion criterion. */
    public Study addStudySiteAndEnrollingEpochToBasicStudy(Study study, String name) {
        EligibilityCriteria criteria = new InclusionEligibilityCriteria();
        StudySite studySite = new StudySite();
        HealthcareSite hcs = new LocalHealthcareSite();
        hcs.setNCICode("NCI_CODE");
        studySite.setHealthcareSite(hcs);
        study.addStudySite(studySite);
        Epoch epoch = new Epoch();
        epoch.setName(name);
        epoch.addEligibilityCriterion(criteria);
        epoch.setEnrollmentIndicator(Boolean.TRUE);
        study.addEpoch(epoch);
        return study;
    }

    public Study addStudySiteAndRandomizedTreatmentEpochToBasicStudy(Study study) {
        study.addStudySite(new StudySite());
        Epoch treatmentEpoch = new Epoch();
        treatmentEpoch.setName("Treatment Epoch1");
        treatmentEpoch.setRandomizedIndicator(Boolean.TRUE);
        treatmentEpoch.setEnrollmentIndicator(Boolean.TRUE);
        study.addEpoch(treatmentEpoch);
        return study;
    }

    public Study addStudySiteAndRandomizedTreatmentEpochWith2ArmsToBasicStudy(Study study) {
        study.addStudySite(new StudySite());
        Epoch treatmentEpoch = new Epoch();
        treatmentEpoch.setName("Treatment Epoch1");
        Arm arm1 = new Arm();
        arm1.setName("arm1");
        treatmentEpoch.addArm(arm1);
        Arm arm2 = new Arm();
        arm2.setName("arm2");
        treatmentEpoch.addArm(arm2);
        treatmentEpoch.setRandomizedIndicator(Boolean.TRUE);
        treatmentEpoch.setEnrollmentIndicator(Boolean.TRUE);
        study.addEpoch(treatmentEpoch);
        return study;
    }

    public Study addStudySiteRandomizedEnrollingTreatmentEpochWith2ArmsAndStratumGroupsToBasicStudy(
            Study study) {
        StudySite studySite = new StudySite();
        studySite.setIrbApprovalDate(Calendar.getInstance().getTime());
        studySite.getStudySiteStudyVersion().setStartDate(new Date());
        study.addStudySite(studySite);
        Epoch treatmentEpoch = new Epoch();
        treatmentEpoch.setName("Treatment Epoch1");
        Arm arm1 = new Arm();
        arm1.setName("arm1");
        treatmentEpoch.addArm(arm1);
        Arm arm2 = new Arm();
        arm2.setName("arm2");
        treatmentEpoch.addArm(arm2);
        addStratumGroupToEpoch(treatmentEpoch);
        treatmentEpoch.setRandomizedIndicator(Boolean.TRUE);
        treatmentEpoch.setEnrollmentIndicator(Boolean.TRUE);
        study.addEpoch(treatmentEpoch);
        return study;
    }

    public Study addStudySiteRandomizedTreatmentEpochWith2ArmsStratumGroupsAndRandomizationToBasicStudy(
            Study study) {
        addStudySiteRandomizedEnrollingTreatmentEpochWith2ArmsAndStratumGroupsToBasicStudy(study);
        addPhoneCallRandomization(study.getEpochs().get(0));
        return study;
    }

    // NOTE: "Cooordinating" typo is part of the public method name; kept for callers.
    public void addStudySiteAsCooordinatingCenter(Study study) {
        StudySite studySite = study.getStudySites().get(0);
        StudyCoordinatingCenter studyCoordinatingCenter = study.getStudyCoordinatingCenters()
                .get(0);
        studyCoordinatingCenter.setHealthcareSite(studySite.getHealthcareSite());
        studyCoordinatingCenter.setStudy(study);
    }

    /** Like {@link #createBasicStudy()} but uses the {@code Study(false)} constructor. */
    public Study createBasicStudyObject() {
        Study study = new Study(false);
        study.setPrecisText("New study");
        study.setShortTitleText("ShortTitleText");
        study.setLongTitleText("LongTitleText");
        study.setPhaseCode("PhaseCode");
        study.setRandomizedIndicator(Boolean.FALSE);
        study.setStratificationIndicator(false);
        study.setCoordinatingCenterStudyStatus(CoordinatingCenterStudyStatus.PENDING);
        study.setDataEntryStatus(StudyDataEntryStatus.INCOMPLETE);
        study.setTargetAccrualNumber(150);
        study.setType("Type");
        study.setMultiInstitutionIndicator(Boolean.TRUE);
        study.setStratificationIndicator(Boolean.FALSE);
        return study;
    }

    /** Adds a (primary) organization-assigned identifier backed by a local site. */
    public Study addOrganizationAssignedIdentifier(Study study, OrganizationIdentifierTypeEnum type, String value) {
        OrganizationAssignedIdentifier orgIdentifier = new OrganizationAssignedIdentifier();
        orgIdentifier.setType(type);
        orgIdentifier.setValue(value);
        orgIdentifier.setHealthcareSite(new LocalHealthcareSite());
        study.getOrganizationAssignedIdentifiers().add(orgIdentifier);
        return study;
    }

    /** Same as above but flags the identifier as non-primary. */
    public Study addOrganizationAssignedIdentifierNonPrimary(Study study, OrganizationIdentifierTypeEnum type, String value) {
        OrganizationAssignedIdentifier orgIdentifier = new OrganizationAssignedIdentifier();
        orgIdentifier.setType(type);
        orgIdentifier.setValue(value);
        orgIdentifier.setHealthcareSite(new LocalHealthcareSite());
        orgIdentifier.setPrimaryIndicator(false);
        study.getOrganizationAssignedIdentifiers().add(orgIdentifier);
        return study;
    }

    public Study addNonEnrollingEpochToBasicStudy(Study study) {
        return addNonEnrollingEpochToBasicStudy(study, "Name");
    }

    public Study addNonEnrollingEpochToBasicStudy(Study study, String name) {
        Epoch epoch = new Epoch();
        epoch.setName(name);
        epoch.setEnrollmentIndicator(false);
        study.addEpoch(epoch);
        return study;
    }

    public Study addStratifiedEpochToBasicStudy(Study study, String name) {
        Epoch epoch = new Epoch();
        epoch.setName(name);
        epoch.setStratificationIndicator(true);
        study.addEpoch(epoch);
        return study;
    }

    /** Links child as a companion of parent's current study version. */
    public Study addParentStudyAssociation(Study parent, Study child) {
        parent.setCompanionIndicator(false);
        child.setCompanionIndicator(true);
        CompanionStudyAssociation association = new CompanionStudyAssociation();
        association.setId(1);
        association.setParentStudyVersion(parent.getStudyVersion());
        association.setCompanionStudy(child);
        child.getParentStudyAssociations().add(association);
        return child;
    }

    /** Same as above, additionally attaching a study site with a primary NCI identifier. */
    public Study addParentStudyAssociationWithSite(Study parent, Study child) {
        parent.setCompanionIndicator(false);
        child.setCompanionIndicator(true);
        CompanionStudyAssociation association = new CompanionStudyAssociation();
        association.setId(1);
        association.setParentStudyVersion(parent.getStudyVersion());
        association.setCompanionStudy(child);
        StudySite site = new StudySite();
        HealthcareSite hcs = new LocalHealthcareSite();
        hcs.setNCICode("NCI_CODE");
        hcs.getOrganizationAssignedIdentifiers().get(0).setPrimaryIndicator(true);
        site.setHealthcareSite(hcs);
        association.addStudySite(site);
        child.getParentStudyAssociations().add(association);
        return child;
    }

    /**
     * Epoch with the given arms; with no arm names, a single arm named after
     * the epoch itself is created.
     */
    public Epoch createEpochWithArms(String epochName, String... armNames) {
        Epoch epoch = new Epoch();
        epoch.setName(epochName);
        if (armNames.length == 0) {
            addNewArm(epoch, epochName);
        } else {
            for (String armName : armNames) {
                addNewArm(epoch, armName);
            }
        }
        return epoch;
    }

    public Epoch createEpoch(String epochName) {
        Epoch epoch = new Epoch();
        epoch.setName(epochName);
        return epoch;
    }

    public void addNewArm(Epoch epoch, String armName) {
        Arm arm = new Arm();
        arm.setName(armName);
        epoch.addArm(arm);
    }

    public Consent createConsent(String consentName) {
        Consent consent = new Consent();
        consent.setName(consentName);
        return consent;
    }

    public Study addConsent(Study study, String name) {
        Consent consent = new Consent();
        consent.setName(name);
        study.addConsent(consent);
        return study;
    }
}
package org.kuali.coeus.sys.framework.security; import org.apache.commons.lang3.StringUtils; import org.kuali.rice.core.api.config.property.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.annotation.web.servlet.configuration.EnableWebMvcSecurity; import org.springframework.security.web.header.writers.frameoptions.XFrameOptionsHeaderWriter; import org.springframework.security.web.header.writers.frameoptions.XFrameOptionsHeaderWriter.XFrameOptionsMode; @Configuration @EnableWebMvcSecurity @Deprecated public class SpringRestSecurity extends WebSecurityConfigurerAdapter { private static final String V1_REST_SERVICES_REGEX = ".*/v1/.*"; private static final String API_REST_SERVICES_REGEX = ".*/api/.*"; private static final String ADMIN_ROLE = "ADMIN"; private static final String KC_REST_ADMIN_PASSWORD = "kc.rest.admin.password"; private static final String KC_REST_ADMIN_USERNAME = "kc.rest.admin.username"; @Autowired @Qualifier("kualiConfigurationService") private ConfigurationService configurationService; @Autowired public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { String userName = configurationService.getPropertyValueAsString(KC_REST_ADMIN_USERNAME); String password = configurationService.getPropertyValueAsString(KC_REST_ADMIN_PASSWORD); if (StringUtils.isNotBlank(userName) && StringUtils.isNotBlank(password)) { auth.inMemoryAuthentication().withUser(userName).password(password).roles(ADMIN_ROLE); } } @Override protected void configure(HttpSecurity 
http) throws Exception { http.csrf().disable(); http.headers().xssProtection().addHeaderWriter(new XFrameOptionsHeaderWriter(XFrameOptionsMode.SAMEORIGIN)); http.authorizeRequests().regexMatchers(V1_REST_SERVICES_REGEX).hasRole(ADMIN_ROLE).and().httpBasic(); http.authorizeRequests().regexMatchers(API_REST_SERVICES_REGEX).hasRole(ADMIN_ROLE).and().httpBasic(); } public ConfigurationService getConfigurationService() { return configurationService; } public void setConfigurationService(ConfigurationService configurationService) { this.configurationService = configurationService; } }
package org.neo4j.kernel.impl.transaction.xaframework;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.transaction.xa.XAException;
import javax.transaction.xa.Xid;

import org.neo4j.kernel.Config;
import org.neo4j.kernel.impl.util.ArrayMap;
import org.neo4j.kernel.impl.util.FileUtils;

/**
 * <CODE>XaLogicalLog</CODE> is a transaction and logical log combined. In
 * this log information about the transaction (such as started, prepared and
 * committed) will be written. All commands participating in the transaction
 * will also be written to the log.
 * <p>
 * Normally you don't have to do anything with this log except open it after it
 * has been instantiated (see {@link XaContainer}). The only method that may be
 * of use when implementing a XA compatible resource is the
 * {@link #getCurrentTxIdentifier}. Leave everything else be unless you know
 * what you're doing.
 * <p>
 * When the log is opened it will be scanned for uncompleted transactions and
 * those transactions will be re-created. When scan of log is complete all
 * transactions that haven't entered prepared state will be marked as done
 * (implies rolledback) and dropped. All transactions that have been prepared
 * will be held in memory until the transaction manager tells them to commit.
 * Transaction that already started commit but didn't get flagged as done will
 * be re-committed.
 */
public class XaLogicalLog
{
    private Logger log;

    // Markers written to the ".active" file to record which log file is live.
    private static final char CLEAN = 'C';
    private static final char LOG1 = '1';
    private static final char LOG2 = '2';

    private FileChannel fileChannel = null;
    private final ByteBuffer buffer;
    private LogBuffer writeBuffer = null;
    private long previousLogLastCommittedTx = -1;
    private long logVersion = 0;
    // identifier -> start entry of in-flight transactions
    private ArrayMap<Integer,LogEntry.Start> xidIdentMap =
        new ArrayMap<Integer,LogEntry.Start>( 4, false, true );
    // identifier -> re-created transaction during recovery
    private Map<Integer,XaTransaction> recoveredTxMap =
        new HashMap<Integer,XaTransaction>();
    private int nextIdentifier = 1;
    private boolean scanIsComplete = false;
    private String fileName = null;
    private final XaResourceManager xaRm;
    private final XaCommandFactory cf;
    private final XaTransactionFactory xaTf;
    private char currentLog = CLEAN;
    private boolean keepLogs = false;
    private boolean autoRotate = true;
    private long rotateAtSize = 10*1024*1024; // 10MB
    private boolean backupSlave = false;
    private boolean slave = false; // not referenced in the visible portion of this class
    private boolean useMemoryMapped = true;

    XaLogicalLog( String fileName, XaResourceManager xaRm, XaCommandFactory cf,
        XaTransactionFactory xaTf, Map<Object,Object> config )
    {
        this.fileName = fileName;
        this.xaRm = xaRm;
        this.cf = cf;
        this.xaTf = xaTf;
        this.useMemoryMapped = getMemoryMapped( config );
        log = Logger.getLogger( this.getClass().getName() + "/" + fileName );
        // Sized to hold a log entry header plus a maximal Xid.
        buffer = ByteBuffer.allocateDirect( 9 + Xid.MAXGTRIDSIZE +
            Xid.MAXBQUALSIZE * 10 );
    }

    // Reads the memory-mapped-buffers flag from config; defaults to true.
    private boolean getMemoryMapped( Map<Object,Object> config )
    {
        String configValue = config != null ?
            (String) config.get( Config.USE_MEMORY_MAPPED_BUFFERS ) : null;
        return configValue != null ?
Boolean.parseBoolean( configValue ) : true;
    }

    /**
     * Opens the logical log, consulting the ".active" marker file to decide
     * which of the two rotating log files is live and whether recovery from a
     * pre-rotation (old) log format is needed.
     */
    synchronized void open() throws IOException
    {
        String activeFileName = fileName + ".active";
        if ( !new File( activeFileName ).exists() )
        {
            if ( new File( fileName ).exists() )
            {
                // old < b8 xaframework with no log rotation and we need to
                // do recovery on it
                open( fileName );
            }
            else
            {
                open( getLog1FileName() );
                setActiveLog( LOG1 );
            }
        }
        else
        {
            FileChannel fc = new RandomAccessFile( activeFileName ,
                "rw" ).getChannel();
            byte bytes[] = new byte[256];
            ByteBuffer buf = ByteBuffer.wrap( bytes );
            int read = fc.read( buf );
            fc.close();
            if ( read != 4 )
            {
                throw new IllegalStateException( "Read " + read +
                    " bytes from " + activeFileName + " but expected 4" );
            }
            buf.flip();
            char c = buf.asCharBuffer().get();
            File copy = new File( fileName + ".copy" );
            safeDeleteFile( copy );
            if ( c == CLEAN )
            {
                // clean
                String newLog = getLog1FileName();
                File file = new File( newLog );
                if ( file.exists() )
                {
                    fixCleanKill( newLog );
                }
                file = new File( getLog2FileName() );
                if ( file.exists() )
                {
                    fixCleanKill( file.getPath() );
                }
                open( newLog );
                setActiveLog( LOG1 );
            }
            else if ( c == LOG1 )
            {
                String newLog = getLog1FileName();
                if ( !new File( newLog ).exists() )
                {
                    throw new IllegalStateException(
                        "Active marked as 1 but no " + newLog + " exist" );
                }
                currentLog = LOG1;
                File otherLog = new File( getLog2FileName() );
                if ( otherLog.exists() )
                {
                    if ( !otherLog.delete() )
                    {
                        // NOTE(review): message names the ".copy" file but the failed
                        // delete is otherLog -- looks like a copy/paste slip; verify.
                        log.warning( "Unable to delete " + copy.getName() );
                    }
                }
                open( newLog );
            }
            else if ( c == LOG2 )
            {
                String newLog = getLog2FileName();
                if ( !new File( newLog ).exists() )
                {
                    throw new IllegalStateException(
                        "Active marked as 2 but no " + newLog + " exist" );
                }
                File otherLog = new File( getLog1FileName() );
                if ( otherLog.exists() )
                {
                    if ( !otherLog.delete() )
                    {
                        // NOTE(review): same wrong-file message as the LOG1 branch above.
                        log.warning( "Unable to delete " + copy.getName() );
                    }
                }
                currentLog = LOG2;
                open( newLog );
            }
            else
            {
                throw new IllegalStateException( "Unknown active log: " + c );
            }
        }
        instantiateCorrectWriteBuffer();
    }

    private void
instantiateCorrectWriteBuffer() throws IOException
    {
        // Choose the write buffer implementation matching the configured mode.
        if ( !useMemoryMapped )
        {
            writeBuffer = new DirectMappedLogBuffer( fileChannel );
        }
        else
        {
            writeBuffer = new MemoryMappedLogBuffer( fileChannel );
        }
    }

    // Best-effort delete; only logs a warning on failure.
    private void safeDeleteFile( File file )
    {
        if ( file.exists() )
        {
            if ( !file.delete() )
            {
                log.warning( "Unable to delete " + file.getName() );
            }
        }
    }

    // A log left over from a kill while the marker said CLEAN: delete it, or
    // archive it under a new version number when logs are being kept.
    private void fixCleanKill( String fileName ) throws IOException
    {
        File file = new File( fileName );
        if ( !keepLogs )
        {
            if ( !file.delete() )
            {
                throw new IllegalStateException(
                    "Active marked as clean and unable to delete log " +
                    fileName );
            }
        }
        else
        {
            renameCurrentLogFileAndIncrementVersion( fileName, file.length() );
        }
    }

    // Opens a specific log file: non-empty means recovery, empty means a
    // fresh log header (version + last committed tx) is written.
    private void open( String fileToOpen ) throws IOException
    {
        fileChannel = new RandomAccessFile( fileToOpen, "rw" ).getChannel();
        if ( fileChannel.size() != 0 )
        {
            doInternalRecovery( fileToOpen );
        }
        else
        {
            logVersion = xaTf.getCurrentVersion();
            buffer.clear();
            buffer.putLong( logVersion );
            long lastTxId = xaTf.getLastCommittedTx();
            buffer.putLong( lastTxId );
            previousLogLastCommittedTx = lastTxId;
            buffer.flip();
            fileChannel.write( buffer );
            scanIsComplete = true;
        }
    }

    public boolean scanIsComplete()
    {
        return scanIsComplete;
    }

    // Monotonically increasing identifier; wraps back to 1 on int overflow.
    private int getNextIdentifier()
    {
        nextIdentifier++;
        if ( nextIdentifier < 0 )
        {
            nextIdentifier = 1;
        }
        return nextIdentifier;
    }

    // returns identifier for transaction
    // [TX_START][xid[gid.length,bid.lengh,gid,bid]][identifier][format id]
    public synchronized int start( Xid xid ) throws XAException
    {
        if ( backupSlave )
        {
            throw new XAException( "Resource is configured as backup slave, " +
                "no new transactions can be started for " + fileName + "."
+ currentLog );
        }
        int xidIdent = getNextIdentifier();
        try
        {
            byte globalId[] = xid.getGlobalTransactionId();
            byte branchId[] = xid.getBranchQualifier();
            int formatId = xid.getFormatId();
            long position = writeBuffer.getFileChannelPosition();
            writeBuffer.put( LogEntry.TX_START ).put( (byte) globalId.length ).put(
                (byte) branchId.length ).put( globalId ).put( branchId )
                .putInt( xidIdent ).putInt( formatId );
            xidIdentMap.put( xidIdent,
                new LogEntry.Start( xid, xidIdent, position ) );
        }
        catch ( IOException e )
        {
            throw new XAException( "Logical log couldn't start transaction: "
                + e );
        }
        return xidIdent;
    }

    // [TX_PREPARE][identifier]
    public synchronized void prepare( int identifier ) throws XAException
    {
        assert xidIdentMap.get( identifier ) != null;
        try
        {
            writeBuffer.put( LogEntry.TX_PREPARE ).putInt( identifier );
            writeBuffer.force();
        }
        catch ( IOException e )
        {
            throw new XAException( "Logical log unable to mark prepare [" +
                identifier + "] " + e );
        }
    }

    // [TX_1P_COMMIT][identifier]
    public synchronized void commitOnePhase( int identifier, long txId )
        throws XAException
    {
        assert xidIdentMap.get( identifier ) != null;
        assert txId != -1;
        try
        {
            writeBuffer.put( LogEntry.TX_1P_COMMIT ).putInt( identifier ).putLong(
                txId );
            writeBuffer.force();
        }
        catch ( IOException e )
        {
            throw new XAException( "Logical log unable to mark 1P-commit [" +
                identifier + "] " + e );
        }
    }

    // [DONE][identifier]
    public synchronized void done( int identifier ) throws XAException
    {
        if ( backupSlave )
        {
            return;
        }
        assert xidIdentMap.get( identifier ) != null;
        try
        {
            writeBuffer.put( LogEntry.DONE ).putInt( identifier );
            xidIdentMap.remove( identifier );
        }
        catch ( IOException e )
        {
            throw new XAException( "Logical log unable to mark as done [" +
                identifier + "] " + e );
        }
    }

    // [DONE][identifier] called from XaResourceManager during internal recovery
    synchronized void doneInternal( int identifier ) throws IOException
    {
        buffer.clear();
        buffer.put( LogEntry.DONE ).putInt( identifier );
        buffer.flip();
fileChannel.write( buffer );
        xidIdentMap.remove( identifier );
    }

    // [TX_2P_COMMIT][identifier]
    public synchronized void commitTwoPhase( int identifier, long txId )
        throws XAException
    {
        assert xidIdentMap.get( identifier ) != null;
        assert txId != -1;
        try
        {
            writeBuffer.put( LogEntry.TX_2P_COMMIT ).putInt( identifier ).putLong(
                txId );
            writeBuffer.force();
        }
        catch ( IOException e )
        {
            throw new XAException( "Logical log unable to mark 2PC [" +
                identifier + "] " + e );
        }
    }

    // [COMMAND][identifier][COMMAND_DATA]
    public synchronized void writeCommand( XaCommand command, int identifier )
        throws IOException
    {
        checkLogRotation();
        assert xidIdentMap.get( identifier ) != null;
        writeBuffer.put( LogEntry.COMMAND ).putInt( identifier );
        command.writeToFile( writeBuffer ); // fileChannel, buffer );
    }

    // Dispatches a recovered log entry to the matching apply method.
    private void applyEntry( LogEntry entry ) throws IOException
    {
        if ( entry instanceof LogEntry.Start )
        {
            applyStartEntry( (LogEntry.Start) entry );
        }
        else if ( entry instanceof LogEntry.Prepare )
        {
            applyPrepareEntry( (LogEntry.Prepare ) entry );
        }
        else if ( entry instanceof LogEntry.Command )
        {
            applyCommandEntry( (LogEntry.Command ) entry );
        }
        else if ( entry instanceof LogEntry.OnePhaseCommit )
        {
            applyOnePhaseCommitEntry( (LogEntry.OnePhaseCommit ) entry );
        }
        else if ( entry instanceof LogEntry.TwoPhaseCommit )
        {
            applyTwoPhaseCommitEntry( (LogEntry.TwoPhaseCommit ) entry );
        }
        else if ( entry instanceof LogEntry.Done )
        {
            applyDoneEntry( (LogEntry.Done ) entry );
        }
        else
        {
            throw new RuntimeException( "Unrecognized log entry " + entry );
        }
    }

    private void applyStartEntry( LogEntry.Start entry) throws IOException
    {
        int identifier = entry.getIdentifier();
        // keep the identifier counter ahead of anything seen in the log
        if ( identifier >= nextIdentifier )
        {
            nextIdentifier = (identifier + 1);
        }
        // re-create the transaction
        Xid xid = entry.getXid();
        xidIdentMap.put( identifier, entry );
        XaTransaction xaTx = xaTf.create( identifier );
        xaTx.setRecovered();
        recoveredTxMap.put( identifier, xaTx );
        xaRm.injectStart( xid, xaTx );
    }

    private void applyPrepareEntry(
LogEntry.Prepare prepareEntry ) throws IOException
    {
        // get the tx identifier
        int identifier = prepareEntry.getIdentifier();
        LogEntry.Start entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            throw new IOException( "Unknown xid for identifier " + identifier );
        }
        Xid xid = entry.getXid();
        if ( xaRm.injectPrepare( xid ) )
        {
            // read only we can remove
            xidIdentMap.remove( identifier );
            recoveredTxMap.remove( identifier );
        }
    }

    private void applyOnePhaseCommitEntry( LogEntry.OnePhaseCommit commit )
        throws IOException
    {
        int identifier = commit.getIdentifier();
        long txId = commit.getTxId();
        LogEntry.Start entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            throw new IOException( "Unknown xid for identifier " + identifier );
        }
        Xid xid = entry.getXid();
        try
        {
            XaTransaction xaTx = xaRm.getXaTransaction( xid );
            xaTx.setCommitTxId( txId );
            xaRm.injectOnePhaseCommit( xid );
        }
        catch ( XAException e )
        {
            e.printStackTrace();
            throw new IOException( e.getMessage() );
        }
    }

    private void applyDoneEntry( LogEntry.Done done ) throws IOException
    {
        // get the tx identifier
        int identifier = done.getIdentifier();
        LogEntry.Start entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            throw new IOException( "Unknown xid for identifier " + identifier );
        }
        Xid xid = entry.getXid();
        xaRm.pruneXid( xid );
        xidIdentMap.remove( identifier );
        recoveredTxMap.remove( identifier );
    }

    private void applyTwoPhaseCommitEntry( LogEntry.TwoPhaseCommit commit )
        throws IOException
    {
        int identifier = commit.getIdentifier();
        long txId = commit.getTxId();
        LogEntry.Start entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            throw new IOException( "Unknown xid for identifier " + identifier );
        }
        Xid xid = entry.getXid();
        if ( xid == null )
        {
            throw new IOException( "Xid null for identifier " + identifier );
        }
        try
        {
            XaTransaction xaTx = xaRm.getXaTransaction( xid );
            xaTx.setCommitTxId( txId );
            xaRm.injectTwoPhaseCommit( xid );
        }
        catch ( XAException e )
        {
            e.printStackTrace();
            throw new
IOException( e.getMessage() );
        }
    }

    private void applyCommandEntry( LogEntry.Command entry ) throws IOException
    {
        int identifier = entry.getIdentifier();
        XaCommand command = entry.getXaCommand();
        if ( command == null )
        {
            throw new IOException( "Null command for identifier " + identifier );
        }
        command.setRecovered();
        XaTransaction xaTx = recoveredTxMap.get( identifier );
        xaTx.injectCommand( command );
    }

    // Rotates once the write position passes rotateAtSize, unless a single
    // huge transaction spans more than half of the rotation threshold.
    private void checkLogRotation() throws IOException
    {
        if ( autoRotate &&
            writeBuffer.getFileChannelPosition() >= rotateAtSize )
        {
            long currentPos = writeBuffer.getFileChannelPosition();
            long firstStartEntry = getFirstStartEntry( currentPos );
            // only rotate if no huge tx is running
            if ( ( currentPos - firstStartEntry ) < rotateAtSize / 2 )
            {
                rotate();
            }
        }
    }

    // Archives the given log file under the next version number and truncates
    // the archived copy to the known good end position.
    private void renameCurrentLogFileAndIncrementVersion( String logFileName,
        long endPosition ) throws IOException
    {
        // DumpLogicalLog.main( new String[] { logFileName } );
        File file = new File( logFileName );
        if ( !file.exists() )
        {
            throw new IOException( "Logical log[" + logFileName +
                "] not found" );
        }
        String newName = getFileName( xaTf.getAndSetNewVersion() );
        File newFile = new File( newName );
        boolean renamed = FileUtils.renameFile( file, newFile );
        if ( !renamed )
        {
            throw new IOException( "Failed to rename log to: " + newName );
        }
        else
        {
            try
            {
                FileChannel channel = new RandomAccessFile( newName,
                    "rw" ).getChannel();
                FileUtils.truncateFile( channel, endPosition );
            }
            catch ( IOException e )
            {
                log.log( Level.WARNING,
                    "Failed to truncate log at correct size", e );
            }
        }
        // DumpLogicalLog.main( new String[] { newName } );
    }

    private void deleteCurrentLogFile( String logFileName ) throws IOException
    {
        File file = new File( logFileName );
        if ( !file.exists() )
        {
            throw new IOException( "Logical log[" + logFileName +
                "] not found" );
        }
        boolean deleted = FileUtils.deleteFile( file );
        if ( !deleted )
        {
            log.warning( "Unable to delete clean logical log[" + logFileName +
                "]" );
        }
    }

    private void releaseCurrentLogFile() throws
IOException
    {
        if ( writeBuffer != null )
        {
            writeBuffer.force();
            writeBuffer = null;
        }
        fileChannel.close();
        fileChannel = null;
    }

    /**
     * Closes the log. With no running transactions the log is marked CLEAN and
     * deleted or archived; with running transactions it is left dirty so the
     * next open triggers recovery.
     */
    public synchronized void close() throws IOException
    {
        if ( fileChannel == null || !fileChannel.isOpen() )
        {
            log.fine( "Logical log: " + fileName + " already closed" );
            return;
        }
        long endPosition = writeBuffer.getFileChannelPosition();
        if ( xidIdentMap.size() > 0 )
        {
            log.info( "Close invoked with " + xidIdentMap.size() +
                " running transaction(s). " );
            writeBuffer.force();
            writeBuffer = null;
            fileChannel.close();
            log.info( "Dirty log: " + fileName + "." + currentLog +
                " now closed. Recovery will be started automatically next " +
                "time it is opened." );
            return;
        }
        releaseCurrentLogFile();
        char logWas = currentLog;
        if ( currentLog != CLEAN ) // again special case, see above
        {
            setActiveLog( CLEAN );
        }
        if ( !keepLogs || backupSlave )
        {
            if ( logWas == CLEAN )
            {
                // special case going from old xa version with no log rotation
                // and we started with a recovery
                deleteCurrentLogFile( fileName );
            }
            else
            {
                deleteCurrentLogFile( fileName + "." + logWas );
            }
        }
        else
        {
            renameCurrentLogFileAndIncrementVersion( fileName + "." +
                logWas, endPosition );
        }
    }

    // Reads the 16-byte log header: [version][last committed tx]. Returns
    // null on short read when strict is false.
    private long[] readLogHeader( ByteBuffer buffer,
        ReadableByteChannel channel, boolean strict ) throws IOException
    {
        buffer.clear();
        buffer.limit( 16 );
        if ( channel.read( buffer ) != 16 )
        {
            if ( strict )
            {
                throw new IOException(
                    "Unable to read log version and last committed tx" );
            }
            return null;
        }
        buffer.flip();
        return new long[] { buffer.getLong(), buffer.getLong() };
    }

    private long[] readAndAssertLogHeader( ByteBuffer buffer,
        ReadableByteChannel channel, long expectedVersion ) throws IOException
    {
        long[] header = readLogHeader( buffer, channel, true );
        if ( header[0] != expectedVersion )
        {
            throw new IOException( "Wrong version in log. 
Expected " + expectedVersion +
                ", but got " + header[0] );
        }
        return header;
    }

    /**
     * Recovers a non-empty log file: replays all readable entries, zeroes out
     * any trailing broken record, and leaves prepared 2PC transactions in
     * memory for the transaction manager to decide on.
     */
    private void doInternalRecovery( String logFileName ) throws IOException
    {
        log.info( "Non clean shutdown detected on log [" + logFileName +
            "]. Recovery started ..." );
        // get log creation time
        long[] header = readLogHeader( buffer, fileChannel, false );
        if ( header == null )
        {
            log.info( "Unable to read timestamp information, "
                + "no records in logical log." );
            fileChannel.close();
            boolean success = FileUtils.renameFile( new File( logFileName ),
                new File( logFileName + "_unknown_timestamp_" +
                    System.currentTimeMillis() + ".log" ) );
            assert success;
            fileChannel = new RandomAccessFile( logFileName,
                "rw" ).getChannel();
            return;
        }
        logVersion = header[0];
        long lastCommittedTx = header[1];
        previousLogLastCommittedTx = lastCommittedTx;
        log.fine( "Logical log version: " + logVersion +
            " with committed tx[" + lastCommittedTx + "]" );
        long logEntriesFound = 0;
        long lastEntryPos = fileChannel.position();
        LogEntry entry;
        while ( (entry = readEntry()) != null )
        {
            applyEntry( entry );
            logEntriesFound++;
            lastEntryPos = fileChannel.position();
        }
        // make sure we overwrite any broken records
        fileChannel.position( lastEntryPos );
        // zero out the slow way since windows don't support truncate very well
        buffer.clear();
        while ( buffer.hasRemaining() )
        {
            buffer.put( (byte)0 );
        }
        buffer.flip();
        long endPosition = fileChannel.size();
        do
        {
            long bytesLeft = fileChannel.size() - fileChannel.position();
            if ( bytesLeft < buffer.capacity() )
            {
                buffer.limit( (int) bytesLeft );
            }
            fileChannel.write( buffer );
            buffer.flip();
        } while ( fileChannel.position() < endPosition );
        fileChannel.position( lastEntryPos );
        scanIsComplete = true;
        log.fine( "Internal recovery completed, scanned " + logEntriesFound +
            " log entries." );
        xaRm.checkXids();
        if ( xidIdentMap.size() == 0 )
        {
            log.fine( "Recovery completed." );
        }
        else
        {
            log.fine( "[" + logFileName + "] Found " + xidIdentMap.size() +
                " prepared 2PC transactions."
);
            for ( LogEntry.Start startEntry : xidIdentMap.values() )
            {
                log.fine( "[" + logFileName + "] 2PC xid[" +
                    startEntry.getXid() + "]" );
            }
        }
        recoveredTxMap.clear();
    }

    // for testing, do not use!
    void reset()
    {
        xidIdentMap.clear();
        recoveredTxMap.clear();
    }

    // Reads the next entry, recording the start position on Start entries.
    private LogEntry readEntry() throws IOException
    {
        long position = fileChannel.position();
        LogEntry entry = LogIoUtils.readEntry( buffer, fileChannel, cf );
        if ( entry instanceof LogEntry.Start )
        {
            ((LogEntry.Start) entry).setStartPosition( position );
        }
        return entry;
    }

    // thread -> identifier of the transaction that thread is committing
    private ArrayMap<Thread,Integer> txIdentMap =
        new ArrayMap<Thread,Integer>( 5, true, true );

    void registerTxIdentifier( int identifier )
    {
        txIdentMap.put( Thread.currentThread(), identifier );
    }

    void unregisterTxIdentifier()
    {
        txIdentMap.remove( Thread.currentThread() );
    }

    /**
     * If the current thread is committing a transaction the identifier of that
     * {@link XaTransaction} can be obtained invoking this method.
     *
     * @return the identifier of the transaction committing or <CODE>-1</CODE>
     *         if current thread isn't committing any transaction
     */
    public int getCurrentTxIdentifier()
    {
        Integer intValue = txIdentMap.get( Thread.currentThread() );
        if ( intValue != null )
        {
            return intValue;
        }
        return -1;
    }

    public ReadableByteChannel getLogicalLog( long version ) throws IOException
    {
        String name = getFileName( version );
        if ( !new File( name ).exists() )
        {
            throw new IOException( "No such log version:" + version );
        }
        return new RandomAccessFile( name, "r" ).getChannel();
    }

    // Collects the Start and Command entries of a single prepared transaction
    // from the given log channel.
    private List<LogEntry> extractPreparedTransactionFromLog( long identifier,
        ReadableByteChannel log ) throws IOException
    {
        readLogHeader( buffer, log, false );
        List<LogEntry> logEntryList = new ArrayList<LogEntry>();
        LogEntry entry;
        while ( (entry = LogIoUtils.readEntry( buffer, log, cf )) != null )
        {
            if ( entry.getIdentifier() != identifier )
            {
                continue;
            }
            if ( entry instanceof LogEntry.Start ||
                entry instanceof LogEntry.Command )
            {
                logEntryList.add( entry );
            }
            else
            {
                throw new
RuntimeException( "Expected start or command entry but found: " + entry );
            }
        }
        if ( logEntryList.isEmpty() )
        {
            throw new IOException( "Transaction for internal identifier[" +
                identifier + "] not found in current log" );
        }
        return logEntryList;
    }

    // Collects the full entry list (Start..Commit) of the transaction with the
    // given committed tx id from the given log channel.
    private List<LogEntry> extractTransactionFromLog( long txId,
        long expectedVersion, ReadableByteChannel log ) throws IOException
    {
        long[] header = readAndAssertLogHeader( buffer, log, expectedVersion );
        long prevTxId = header[1];
        assertLogCanContainTx( txId, prevTxId );
        List<LogEntry> logEntryList = null;
        Map<Integer,List<LogEntry>> transactions =
            new HashMap<Integer,List<LogEntry>>();
        LogEntry entry;
        while ( (entry = LogIoUtils.readEntry( buffer, log, cf )) != null &&
            logEntryList == null )
        {
            if ( entry instanceof LogEntry.Start )
            {
                List<LogEntry> list = new LinkedList<LogEntry>();
                list.add( entry );
                transactions.put( entry.getIdentifier(), list );
            }
            else if ( entry instanceof LogEntry.Commit )
            {
                if ( ((LogEntry.Commit) entry).getTxId() == txId )
                {
                    logEntryList = transactions.get( entry.getIdentifier() );
                    logEntryList.add( entry );
                }
                else
                {
                    transactions.remove( entry.getIdentifier() );
                }
            }
            else if ( entry instanceof LogEntry.Command )
            {
                transactions.get( entry.getIdentifier() ).add( entry );
            }
            else if ( entry instanceof LogEntry.Done )
            {
                transactions.remove( entry.getIdentifier() );
            }
            else
            {
                throw new RuntimeException( "Unknown entry: " + entry );
            }
        }
        if ( logEntryList == null )
        {
            throw new IOException( "Transaction[" + txId +
                "] not found in log (" + expectedVersion + ", " +
                prevTxId + ")" );
        }
        return logEntryList;
    }

    private void assertLogCanContainTx( long txId, long prevTxId )
        throws IOException
    {
        if ( prevTxId >= txId )
        {
            throw new IOException( "Log says " + txId +
                " can not exist in this log (prev tx id=" + prevTxId + ")" );
        }
    }

    // Timestamp-suffixed name that does not collide with an existing file.
    private String generateUniqueName( String baseName )
    {
        String tmpName = baseName + "-" + System.currentTimeMillis();
        while ( new File( tmpName ).exists() )
        {
            tmpName = baseName + "-" +
System.currentTimeMillis() + "_";
        }
        return tmpName;
    }

    // Returns a channel over the prepared transaction, writing it out to a
    // ".ptx_" side file on first request.
    public synchronized ReadableByteChannel getPreparedTransaction(
        long identifier ) throws IOException
    {
        String name = fileName + ".ptx_" + identifier;
        File txFile = new File( name );
        if ( txFile.exists() )
        {
            return new RandomAccessFile( name, "r" ).getChannel();
        }
        ReadableByteChannel log = getLogicalLogOrMyself( logVersion );
        List<LogEntry> logEntryList =
            extractPreparedTransactionFromLog( identifier, log );
        log.close();
        writeOutLogEntryList( logEntryList, name,
            "temporary-ptx-write-out-" + identifier );
        return new RandomAccessFile( name, "r" ).getChannel();
    }

    // Writes the entries to a unique temp file, then atomically renames it.
    private void writeOutLogEntryList( List<LogEntry> logEntryList,
        String name, String tmpNameHint ) throws IOException
    {
        String tmpName = generateUniqueName( tmpNameHint );
        FileChannel txLog = new RandomAccessFile( tmpName, "rw" ).getChannel();
        LogBuffer buf = new DirectMappedLogBuffer( txLog );
        for ( LogEntry entry : logEntryList )
        {
            LogIoUtils.writeLogEntry( entry, buf );
        }
        buf.force();
        txLog.close();
        if ( !new File( tmpName ).renameTo( new File( name ) ) )
        {
            throw new IOException( "Failed to rename " + tmpName + " to " +
                name );
        }
    }

    // Returns a channel over the committed transaction, writing it out to a
    // ".tx_" side file on first request.
    public synchronized ReadableByteChannel getCommittedTransaction( long txId )
        throws IOException
    {
        // check if written out
        String name = fileName + ".tx_" + txId;
        File txFile = new File( name );
        if ( txFile.exists() )
        {
            return new RandomAccessFile( name, "r" ).getChannel();
        }
        long version = findLogContainingTxId( txId );
        System.out.println( "Found txId:" + txId + " in log version:" +
            version );
        if ( version == -1 )
        {
            throw new RuntimeException( "txId:" + txId +
                " not found in any logical log " + "(starting at " +
                logVersion + " and searching backwards" );
        }
        // extract transaction
        ReadableByteChannel log = getLogicalLogOrMyself( version );
        List<LogEntry> logEntryList =
            extractTransactionFromLog( txId, version, log );
        log.close();
        writeOutLogEntryList( logEntryList, name,
            "temporary-tx-write-out-" + txId );
        ReadableByteChannel
result = new RandomAccessFile( name, "r" ).getChannel(); return result; } private ReadableByteChannel getLogicalLogOrMyself( long version ) throws IOException { if ( version < logVersion ) { return getLogicalLog( version ); } else if ( version == logVersion ) { String currentLogName = getCurrentLogFileName(); return new RandomAccessFile( currentLogName, "r" ).getChannel(); } else { throw new RuntimeException( "Version[" + version + "] is higher then current log version[" + logVersion + "]" ); } } private String getCurrentLogFileName() { return currentLog == LOG1 ? getLog1FileName() : getLog2FileName(); } private long findLogContainingTxId( long txId ) throws IOException { long version = logVersion; long committedTx = previousLogLastCommittedTx; while ( version >= 0 ) { ReadableByteChannel log = getLogicalLogOrMyself( version ); ByteBuffer buf = ByteBuffer.allocate( 16 ); long[] header = readAndAssertLogHeader( buf, log, version ); committedTx = header[1]; log.close(); if ( committedTx <= txId ) { break; } version } return version; } public long getLogicalLogLength( long version ) { File file = new File( getFileName( version ) ); return file.exists() ? file.length() : -1; } public boolean hasLogicalLog( long version ) { return new File( getFileName( version ) ).exists(); } public boolean deleteLogicalLog( long version ) { File file = new File(getFileName( version ) ); return file.exists() ? 
FileUtils.deleteFile( file ) : false;
    }

    // Switches this log into backup-slave mode; only allowed when idle.
    public void makeBackupSlave()
    {
        if ( xidIdentMap.size() > 0 )
        {
            throw new IllegalStateException(
                "There are active transactions" );
        }
        backupSlave = true;
    }

    // Replays entries from an external channel, optionally rewriting them into
    // the local log under a fresh identifier.
    private class LogApplier
    {
        private final ReadableByteChannel byteChannel;
        private LogEntry.Start startEntry;

        LogApplier( ReadableByteChannel byteChannel )
        {
            this.byteChannel = byteChannel;
        }

        // Reads one entry and applies it; false at end of channel.
        boolean readAndApplyEntry() throws IOException
        {
            LogEntry entry = LogIoUtils.readEntry( buffer, byteChannel, cf );
            if ( entry != null )
            {
                applyEntry( entry );
            }
            return entry != null;
        }

        // Reads one entry, re-stamps its identifier, applies it and writes it
        // to the local write buffer; false at end of channel.
        boolean readAndApplyAndWriteEntry( int newXidIdentifier )
            throws IOException
        {
            LogEntry entry = LogIoUtils.readEntry( buffer, byteChannel, cf );
            if ( entry != null )
            {
                entry.setIdentifier( newXidIdentifier );
                if ( entry instanceof LogEntry.Commit )
                {
                    // hack to get done record written after commit record
                    LogIoUtils.writeLogEntry( entry, writeBuffer );
                    applyEntry( entry );
                }
                else
                {
                    if ( entry instanceof LogEntry.Start )
                    {
                        startEntry = (LogEntry.Start) entry;
                    }
                    applyEntry( entry );
                    LogIoUtils.writeLogEntry( entry, writeBuffer );
                }
                return true;
            }
            return false;
        }
    }

    /**
     * Applies a complete foreign log to this backup slave; the version in the
     * incoming header must match the locally expected version.
     */
    public synchronized void applyLog( ReadableByteChannel byteChannel )
        throws IOException
    {
        if ( !backupSlave )
        {
            throw new IllegalStateException( "This is not a backup slave" );
        }
        if ( xidIdentMap.size() > 0 )
        {
            throw new IllegalStateException(
                "There are active transactions" );
        }
        long[] header = readLogHeader( buffer, byteChannel, true );
        logVersion = header[0];
        long previousCommittedTx = header[1];
        if ( logVersion != xaTf.getCurrentVersion() )
        {
            throw new IllegalStateException( "Tried to apply version " +
                logVersion + " but expected version " +
                xaTf.getCurrentVersion() );
        }
        log.fine( "Logical log version: " + logVersion +
            "(previous committed tx=" + previousCommittedTx + ")" );
        long logEntriesFound = 0;
        LogApplier logApplier = new LogApplier( byteChannel );
        while ( logApplier.readAndApplyEntry() )
        {
            logEntriesFound++;
        }
        byteChannel.close();
xaTf.flushAll();
        xaTf.getAndSetNewVersion();
        xaRm.reset();
        log.info( "Log[" + fileName + "] version " + logVersion +
            " applied successfully." );
    }

    /**
     * Applies a single foreign transaction (which carries no tx id) and
     * commits it locally as nextTxId via a one-phase commit.
     */
    public synchronized void applyTransactionWithoutTxId(
        ReadableByteChannel byteChannel, long nextTxId ) throws IOException
    {
        if ( nextTxId != (xaTf.getLastCommittedTx() + 1) )
        {
            // NOTE(review): the message reports getCurrentVersion()+1 while the
            // check uses getLastCommittedTx()+1 -- likely a copy/paste slip; verify.
            throw new IllegalStateException( "Tried to apply tx " +
                nextTxId + " but expected transaction " +
                (xaTf.getCurrentVersion() + 1) );
        }
        log.fine( "Logical log version: " + logVersion +
            ", committing tx=" + nextTxId + ")" );
        // System.out.println( "applyTxWithoutTxId#start @ pos: " + writeBuffer.getFileChannelPosition() );
        long logEntriesFound = 0;
        LogApplier logApplier = new LogApplier( byteChannel );
        int xidIdent = getNextIdentifier();
        while ( logApplier.readAndApplyAndWriteEntry( xidIdent ) )
        {
            logEntriesFound++;
        }
        byteChannel.close();
        LogEntry.Start entry = logApplier.startEntry;
        if ( entry == null )
        {
            throw new IOException( "Unable to find start entry" );
        }
        // System.out.println( "applyTxWithoutTxId#before 1PC @ pos: " + writeBuffer.getFileChannelPosition() );
        LogEntry.OnePhaseCommit commit =
            new LogEntry.OnePhaseCommit( xidIdent, nextTxId );
        LogIoUtils.writeLogEntry( commit, writeBuffer );
        Xid xid = entry.getXid();
        try
        {
            XaTransaction xaTx = xaRm.getXaTransaction( xid );
            xaTx.setCommitTxId( nextTxId );
            xaRm.commit( xid, true );
        }
        catch ( XAException e )
        {
            e.printStackTrace();
            throw new IOException( e.getMessage() );
        }
        // LogEntry.Done done = new LogEntry.Done( entry.getIdentifier() );
        // LogIoUtils.writeLogEntry( done, writeBuffer );
        // xaTf.setLastCommittedTx( nextTxId ); // done in doCommit
        log.info( "Tx[" + nextTxId + "] " + " applied successfully."
);
        // System.out.println( "applyTxWithoutTxId#end @ pos: " + writeBuffer.getFileChannelPosition() );
    }

    /** Applies a complete foreign transaction, writing it into the local log. */
    public synchronized void applyTransaction( ReadableByteChannel byteChannel )
        throws IOException
    {
        // System.out.println( "applyFullTx#start @ pos: " + writeBuffer.getFileChannelPosition() );
        long logEntriesFound = 0;
        LogApplier logApplier = new LogApplier( byteChannel );
        int xidIdent = getNextIdentifier();
        while ( logApplier.readAndApplyAndWriteEntry( xidIdent ) )
        {
            logEntriesFound++;
        }
        byteChannel.close();
        // System.out.println( "applyFullTx#end @ pos: " + writeBuffer.getFileChannelPosition() );
    }

    private String getLog1FileName()
    {
        return fileName + ".1";
    }

    private String getLog2FileName()
    {
        return fileName + ".2";
    }

    /**
     * Rotates to the other log file: archives the current log under the next
     * version number and starts a fresh log with an updated header.
     * (Method continues beyond this chunk.)
     */
    public synchronized void rotate() throws IOException
    {
        xaTf.flushAll();
        String newLogFile = getLog2FileName();
        String currentLogFile = getLog1FileName();
        char newActiveLog = LOG2;
        long currentVersion = xaTf.getCurrentVersion();
        String oldCopy = getFileName( currentVersion );
        if ( currentLog == CLEAN || currentLog == LOG2 )
        {
            newActiveLog = LOG1;
            newLogFile = getLog1FileName();
            currentLogFile = getLog2FileName();
        }
        else
        {
            assert currentLog == LOG1;
        }
        assertFileDoesntExist( newLogFile, "New log file" );
        assertFileDoesntExist( oldCopy, "Copy log file" );
        // DumpLogicalLog.main( new String[] { currentLogFile } );
        long endPosition = writeBuffer.getFileChannelPosition();
        writeBuffer.force();
        FileChannel newLog = new RandomAccessFile(
            newLogFile, "rw" ).getChannel();
        buffer.clear();
        buffer.putLong( currentVersion + 1 );
        long lastTx = xaTf.getLastCommittedTx();
        buffer.putLong( lastTx ).flip();
        previousLogLastCommittedTx = lastTx;
        if ( newLog.write( buffer ) != 16 )
        {
            throw new IOException( "Unable to write log version to new" );
        }
        fileChannel.position( 0 );
        long[] header = readAndAssertLogHeader( buffer, fileChannel,
            currentVersion );
        if ( xidIdentMap.size() > 0 )
        {
            fileChannel.position( getFirstStartEntry( endPosition ) );
        }
        LogEntry entry;
        while ((entry =
LogIoUtils.readEntry( buffer, fileChannel, cf )) != null ) { if ( xidIdentMap.get( entry.getIdentifier() ) != null ) { if ( entry instanceof LogEntry.Start ) { ((LogEntry.Start) entry).setStartPosition( newLog.position() ); } LogBuffer newLogBuffer = new DirectLogBuffer( newLog, buffer ); LogIoUtils.writeLogEntry( entry, newLogBuffer ); } } newLog.force( false ); releaseCurrentLogFile(); setActiveLog( newActiveLog ); if ( keepLogs ) { renameCurrentLogFileAndIncrementVersion( currentLogFile, endPosition ); } else { deleteCurrentLogFile( currentLogFile ); xaTf.getAndSetNewVersion(); } if ( xaTf.getCurrentVersion() != ( currentVersion + 1 ) ) { throw new IOException( "version change failed" ); } fileChannel = newLog; instantiateCorrectWriteBuffer(); } private void assertFileDoesntExist( String file, String description ) throws IOException { if ( new File( file ).exists() ) { throw new IOException( description + ": " + file + " already exist" ); } } private long getFirstStartEntry( long endPosition ) { long firstEntryPosition = endPosition; for ( LogEntry.Start entry : xidIdentMap.values() ) { if ( entry.getStartPosition() < firstEntryPosition ) { assert entry.getStartPosition() > 0; firstEntryPosition = entry.getStartPosition(); } } return firstEntryPosition; } private void setActiveLog( char c ) throws IOException { if ( c != CLEAN && c != LOG1 && c != LOG2 ) { throw new IllegalArgumentException( "Log must be either clean, " + "1 or 2" ); } if ( c == currentLog ) { throw new IllegalStateException( "Log should not be equal to " + "current " + currentLog ); } ByteBuffer bb = ByteBuffer.wrap( new byte[4] ); bb.asCharBuffer().put( c ).flip(); FileChannel fc = new RandomAccessFile( fileName + ".active" , "rw" ).getChannel(); int wrote = fc.write( bb ); if ( wrote != 4 ) { throw new IllegalStateException( "Expected to write 4 -> " + wrote ); } fc.force( false ); fc.close(); currentLog = c; } public void setKeepLogs( boolean keep ) { this.keepLogs = keep; } public boolean 
isLogsKept() { return this.keepLogs; } public void setAutoRotateLogs( boolean autoRotate ) { this.autoRotate = autoRotate; } public boolean isLogsAutoRotated() { return this.autoRotate; } public void setLogicalLogTargetSize( long size ) { this.rotateAtSize = size; } public long getLogicalLogTargetSize() { return this.rotateAtSize; } public String getFileName( long version ) { return fileName + ".v" + version; } }
package org.cytoscape.task.internal.creation;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.cytoscape.event.CyEventHelper;
import org.cytoscape.model.CyColumn;
import org.cytoscape.model.CyEdge;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.model.CyNetworkFactory;
import org.cytoscape.model.CyNetworkManager;
import org.cytoscape.model.CyNode;
import org.cytoscape.model.CyRow;
import org.cytoscape.model.CyTable;
import org.cytoscape.model.CyTableEntry;
import org.cytoscape.session.CyNetworkNaming;
import org.cytoscape.view.model.CyNetworkView;
import org.cytoscape.view.model.CyNetworkViewFactory;
import org.cytoscape.view.model.CyNetworkViewManager;
import org.cytoscape.view.model.View;
import org.cytoscape.view.presentation.property.MinimalVisualLexicon;
import org.cytoscape.view.presentation.property.RichVisualLexicon;
import org.cytoscape.view.vizmap.VisualMappingManager;
import org.cytoscape.work.TaskMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Task that clones an existing network: copies the default node/edge/network
 * tables, all nodes and edges, and (when present) the original view's node
 * positions and visual style.
 */
public class CloneNetworkTask extends AbstractCreationTask {

    private static final Logger logger = LoggerFactory.getLogger(CloneNetworkTask.class);

    // Maps each original node to its clone; populated in cloneNodes, used by
    // cloneEdges/copyView, and released at the end of run().
    private Map<CyNode, CyNode> orig2NewNodeMap;

    private final VisualMappingManager vmm;
    private final CyNetworkFactory netFactory;
    private final CyNetworkViewFactory netViewFactory;
    private final CyNetworkNaming naming;
    private final CyEventHelper eventHelper;

    public CloneNetworkTask(final CyNetwork net,
                            final CyNetworkManager netmgr,
                            final CyNetworkViewManager networkViewManager,
                            final VisualMappingManager vmm,
                            final CyNetworkFactory netFactory,
                            final CyNetworkViewFactory netViewFactory,
                            final CyNetworkNaming naming,
                            final CyEventHelper eventHelper) {
        super(net, netmgr, networkViewManager);
        this.vmm = vmm;
        this.netFactory = netFactory;
        this.netViewFactory = netViewFactory;
        this.naming = naming;
        this.eventHelper = eventHelper;
    }

    /**
     * Clones the parent network, registers the clone, and copies the view if
     * the parent has one. Progress is reported through {@code tm}.
     */
    public void run(TaskMonitor tm) {
        tm.setProgress(0.0);
        final long start = System.currentTimeMillis();
        logger.debug("Clone Network Task start");

        // Create copied network model
        final CyNetwork newNet = cloneNetwork(parentNetwork);
        tm.setProgress(0.4);

        final CyNetworkView origView = networkViewManager.getNetworkView(parentNetwork);
        networkManager.addNetwork(newNet);
        tm.setProgress(0.6);

        if (origView != null)
            copyView(newNet, origView);
        tm.setProgress(0.9);

        // Release the node map so the task doesn't pin cloned models in memory.
        orig2NewNodeMap.clear();
        orig2NewNodeMap = null;

        logger.debug("Cloning finished in " + (System.currentTimeMillis() - start) + " msec.");
        tm.setProgress(1.0);
    }

    /** Creates a new network and copies columns, nodes, edges, and the title. */
    private CyNetwork cloneNetwork(CyNetwork origNet) {
        final CyNetwork newNet = netFactory.createNetwork();

        final CyTable nodeTable = newNet.getDefaultNodeTable();
        final CyTable edgeTable = newNet.getDefaultEdgeTable();
        final CyTable networkTable = newNet.getDefaultNetworkTable();

        // copy default columns
        cloneColumns(origNet.getDefaultNodeTable(), nodeTable);
        cloneColumns(origNet.getDefaultEdgeTable(), edgeTable);
        cloneColumns(origNet.getDefaultNetworkTable(), networkTable);

        cloneNodes(origNet, newNet);
        cloneEdges(origNet, newNet);

        newNet.getRow(newNet).set(CyTableEntry.NAME,
                naming.getSuggestedNetworkTitle(origNet.getRow(origNet).get(CyTableEntry.NAME, String.class)));

        return newNet;
    }

    /** Copies every node and its row, recording the original→clone mapping. */
    private void cloneNodes(CyNetwork origNet, CyNetwork newNet) {
        orig2NewNodeMap = new HashMap<CyNode, CyNode>();
        for (final CyNode origNode : origNet.getNodeList()) {
            final CyNode newNode = newNet.addNode();
            orig2NewNodeMap.put(origNode, newNode);
            cloneRow(origNet.getRow(origNode), newNet.getRow(newNode));
        }
    }

    /** Copies every edge (preserving direction) between the cloned endpoints. */
    private void cloneEdges(CyNetwork origNet, CyNetwork newNet) {
        for (final CyEdge origEdge : origNet.getEdgeList()) {
            final CyNode newSource = orig2NewNodeMap.get(origEdge.getSource());
            final CyNode newTarget = orig2NewNodeMap.get(origEdge.getTarget());
            final boolean newDirected = origEdge.isDirected();
            final CyEdge newEdge = newNet.addEdge(newSource, newTarget, newDirected);
            cloneRow(origNet.getRow(origEdge), newNet.getRow(newEdge));
        }
    }

    /**
     * Ensures {@code to} has every column of {@code from}, creating missing
     * ones (mutable, i.e. non-immutable).
     *
     * @throws IllegalArgumentException if a column exists in both tables with
     *         a conflicting type
     */
    private void cloneColumns(final CyTable from, final CyTable to) {
        for (final CyColumn fromColumn : from.getColumns()) {
            final CyColumn toColumn = to.getColumn(fromColumn.getName());
            if (toColumn == null) {
                if (List.class.isAssignableFrom(fromColumn.getType()))
                    to.createListColumn(fromColumn.getName(), fromColumn.getListElementType(), false);
                else
                    to.createColumn(fromColumn.getName(), fromColumn.getType(), false);
            } else if (toColumn.getType() == fromColumn.getType()) {
                continue;
            } else {
                // FIX(review): added the missing space before "but" so the
                // message doesn't run the column name into the next word.
                throw new IllegalArgumentException("column of same name: " + fromColumn.getName()
                        + " but types don't match (orig): " + fromColumn.getType().getName()
                        + " (new): " + toColumn.getType().getName());
            }
        }
    }

    /** Copies all raw cell values of one row into another. */
    private void cloneRow(final CyRow from, final CyRow to) {
        for (final CyColumn column : from.getTable().getColumns())
            to.set(column.getName(), from.getRaw(column.getName()));
    }

    /**
     * Copy Visual Properties to the new network view.
     */
    private void copyView(final CyNetwork newNet, final CyNetworkView origView) {
        final CyNetworkView newView = netViewFactory.createNetworkView(newNet);

        // Copy node locations since this is controlled outside of visual style.
        for (final View<CyNode> origNodeView : origView.getNodeViews()) {
            final CyNode node = origNodeView.getModel();
            final View<CyNode> newNodeView = newView.getNodeView(orig2NewNodeMap.get(node));
            newNodeView.setVisualProperty(MinimalVisualLexicon.NODE_X_LOCATION,
                    origNodeView.getVisualProperty(MinimalVisualLexicon.NODE_X_LOCATION));
            newNodeView.setVisualProperty(MinimalVisualLexicon.NODE_Y_LOCATION,
                    origNodeView.getVisualProperty(MinimalVisualLexicon.NODE_Y_LOCATION));
            newNodeView.setVisualProperty(RichVisualLexicon.NODE_Z_LOCATION,
                    origNodeView.getVisualProperty(RichVisualLexicon.NODE_Z_LOCATION));
        }

        vmm.setVisualStyle(vmm.getVisualStyle(origView), newView);
        vmm.getVisualStyle(origView).apply(newView);
        networkViewManager.addNetworkView(newView);
        newView.fitContent();
    }
}
// Neighborhood3x3Operation.java

package imagej.core.plugins.neigh;

import imagej.data.Dataset;
import imagej.util.Index;
import imagej.util.Rect;
import net.imglib2.Cursor;
import net.imglib2.RandomAccess;
import net.imglib2.RandomAccessible;
import net.imglib2.img.Img;
import net.imglib2.type.numeric.RealType;
import net.imglib2.view.Views;

/**
 * Neighborhood3x3Operation - a helper class for 3x3 neighborhood operation
 * plugins such as SmoothDataValues, SharpenDataValues, and FindEdges. Does
 * the work of communicating with a Neighborhood3x3Watcher.
 * <p>
 * The operation walks every XY plane of the input, feeds each pixel's 3x3
 * neighborhood (read from an untouched copy of the data, with mirrored
 * out-of-bounds access) to the watcher, and writes the watcher's computed
 * value back into the input image in place.
 *
 * @author Barry DeZonia
 */
public class Neighborhood3x3Operation {

    // -- instance variables --

    private Dataset input;                              // dataset whose pixels are modified in place
    private Img<? extends RealType<?>> inputImage;      // live image backing the dataset (written to)
    private Img<? extends RealType<?>> inputImageCopy;  // pristine copy used as the read source
    private Rect selection;                             // region of interest; 0 width/height means "whole plane"
    private Neighborhood3x3Watcher watcher;             // callback that computes the output pixel value

    // -- constructor --

    /**
     * @param input   dataset to transform in place
     * @param watcher non-null callback computing each output value from a 3x3 neighborhood
     * @throws IllegalArgumentException if {@code watcher} is null
     */
    public Neighborhood3x3Operation(Dataset input, Neighborhood3x3Watcher watcher) {
        this.input = input;
        this.watcher = watcher;

        if (watcher == null)
            throw new IllegalArgumentException(
                "neighborhood watcher cannot be null!");
    }

    // -- public interface --

    /** Validates the input, snapshots the data, then processes every plane. */
    public void run() {
        checkInput();
        setupWorkingData();
        runAssignment();
    }

    // -- private interface --

    /**
     * make sure we have an input image and that it's dimensionality is correct
     */
    private void checkInput() {
        if (input == null)
            throw new IllegalArgumentException("input Dataset is null");

        if (input.getImgPlus() == null)
            throw new IllegalArgumentException("input Img is null");

        //if (input.getImage().numDimensions() != 2)
    }

    /** Caches the live image, makes the read-only copy, and grabs the ROI. */
    private void setupWorkingData() {
        inputImage = input.getImgPlus();
        inputImageCopy = cloneImage(inputImage);
        selection = input.getSelection();
    }

    /**
     * Iterates over all hyperplanes beyond X/Y (dims 2..n-1) and applies the
     * 3x3 operation to each XY plane, then notifies the dataset of the change.
     */
    private void runAssignment() {
        // planeDims holds the sizes of every dimension beyond X (dim 0) and Y (dim 1)
        long[] planeDims = new long[inputImage.numDimensions() - 2];
        for (int i = 0; i < planeDims.length; i++)
            planeDims[i] = inputImage.dimension(i+2);
        long[] planeIndex = new long[planeDims.length];
        long totalPlanes = Index.getTotalLength(planeDims);
        for (long plane = 0; plane < totalPlanes; plane++) {
            // decode the linear plane number into per-dimension coordinates
            Index.index1DtoND(planeDims, plane, planeIndex);
            applyOperationToPlane(planeIndex);
        }
        input.update();
    }

    /**
     * Applies the watcher to one XY plane identified by {@code planeIndex}
     * (coordinates of dims 2..n-1). Reads come from the pristine copy with
     * mirrored out-of-bounds extension; writes go to the live image.
     */
    private void applyOperationToPlane(long[] planeIndex) {

        long[] imageDims = new long[inputImage.numDimensions()];
        inputImage.dimensions(imageDims);

        // a zero-sized selection means "use the full plane extent"
        if (selection.width == 0) selection.width = (int) imageDims[0];
        if (selection.height == 0) selection.height = (int) imageDims[1];

        // output is done by changing input image in place
        RandomAccess<? extends RealType<?>> outputAccessor =
            inputImage.randomAccess();

        // input is a copy of the original data with out of bounds access enabled
        RandomAccessible<? extends RealType<?>> inputInterval =
            Views.extendMirrorSingle(inputImageCopy);

        RandomAccess<? extends RealType<?>> extendedInput =
            inputInterval.randomAccess();

        // initialize the watcher
        watcher.setup();

        long[] inputPosition = new long[imageDims.length];
        long[] localInputPosition = new long[imageDims.length];

        // pin dims 2..n-1 of both position vectors to the current plane
        for (int i = 2; i < inputPosition.length; i++) {
            inputPosition[i] = planeIndex[i-2];
            localInputPosition[i] = planeIndex[i-2];
        }

        // NOTE(review): these bounds treat selection.width/height as exclusive
        // END coordinates, not extents — with selection.x/y > 0 the loops cover
        // [x, width) rather than [x, x+width). TODO confirm Rect's semantics;
        // if width/height are extents this under-iterates offset selections.
        for (long y = selection.y; y < selection.height; y++) {
            inputPosition[1] = y;
            for (long x = selection.x; x < selection.width; x++) {
                inputPosition[0] = x;

                watcher.initializeNeighborhood(inputPosition);

                // visit the 3x3 neighborhood centered on (x, y)
                for (int dy = -1; dy <= 1; dy++) {
                    localInputPosition[1] = inputPosition[1] + dy;
                    for (int dx = -1; dx <= 1; dx++) {
                        localInputPosition[0] = inputPosition[0] + dx;
                        extendedInput.setPosition(localInputPosition);
                        double localValue = extendedInput.get().getRealDouble();
                        watcher.visitLocation(dx, dy, localValue);
                    }
                }
                // assign output
                outputAccessor.setPosition(inputPosition);
                outputAccessor.get().setReal(watcher.calcOutputValue());
            }
        }
    }

    // TODO - eliminate when Imglib allows ability to duplicate/clone an Img
    // TODO - find a way to eliminate use of raw types here
    /** Makes a value-for-value copy of an image via cursor/random-access walk. */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private Img<? extends RealType<?>> cloneImage(Img image) {
        // TODO - used to be able to call Image::clone()
        // For now copy data by hand
        long[] dimensions = new long[image.numDimensions()];
        image.dimensions(dimensions);
        Img<? extends RealType<?>> copyOfImg =
            image.factory().create(dimensions, image.firstElement());
        long[] position = new long[dimensions.length];
        Cursor<? extends RealType<?>> cursor = image.cursor();
        RandomAccess<? extends RealType<?>> access = copyOfImg.randomAccess();
        while (cursor.hasNext()) {
            cursor.next();
            double currValue = cursor.get().getRealDouble();
            for (int i = 0; i < position.length; i++)
                position[i] = cursor.getLongPosition(i);
            access.setPosition(position);
            access.get().setReal(currValue);
        }
        return copyOfImg;
    }
}
package org.hisp.dhis.android.core.datavalue.internal;

import org.hisp.dhis.android.core.arch.api.executors.internal.APICallExecutor;
import org.hisp.dhis.android.core.arch.call.D2Progress;
import org.hisp.dhis.android.core.arch.call.internal.D2ProgressManager;
import org.hisp.dhis.android.core.common.State;
import org.hisp.dhis.android.core.datavalue.DataValue;
import org.hisp.dhis.android.core.imports.internal.DataValueImportSummary;
import org.hisp.dhis.android.core.systeminfo.SystemInfo;
import org.hisp.dhis.android.core.systeminfo.internal.SystemInfoModuleDownloader;

import java.util.ArrayList;
import java.util.Collection;

import javax.inject.Inject;

import androidx.annotation.NonNull;
import dagger.Reusable;
import io.reactivex.Observable;
import io.reactivex.Single;

/**
 * Uploads locally pending aggregated data values (states TO_POST and
 * TO_UPDATE) to the server as a single data value set, refreshing system
 * info first and emitting two progress ticks along the way.
 */
@Reusable
public final class DataValuePostCall {

    private final DataValueService dataValueService;
    private final DataValueStore dataValueStore;
    private final APICallExecutor apiCallExecutor;
    private final SystemInfoModuleDownloader systemInfoDownloader;

    @Inject
    DataValuePostCall(@NonNull DataValueService dataValueService,
                      @NonNull DataValueStore dataValueSetStore,
                      @NonNull APICallExecutor apiCallExecutor,
                      @NonNull SystemInfoModuleDownloader systemInfoDownloader) {
        this.dataValueService = dataValueService;
        this.dataValueStore = dataValueSetStore;
        this.apiCallExecutor = apiCallExecutor;
        this.systemInfoDownloader = systemInfoDownloader;
    }

    /**
     * @return a deferred stream that emits one progress event after the
     *         system-info refresh and one after the upload, or completes
     *         immediately when nothing is pending
     */
    public Observable<D2Progress> uploadDataValues() {
        return Observable.defer(() -> {
            // Gather everything awaiting upload, new values first.
            Collection<DataValue> pendingValues = new ArrayList<>();
            appendValuesWithState(pendingValues, State.TO_POST);
            appendValuesWithState(pendingValues, State.TO_UPDATE);

            if (pendingValues.isEmpty()) {
                return Observable.empty();
            }

            D2ProgressManager progressManager = new D2ProgressManager(2);

            // Refresh system info before posting; counts as the first tick.
            Single<D2Progress> systemInfoDownload = systemInfoDownloader.downloadMetadata()
                    .toSingle(() -> progressManager.increaseProgress(SystemInfo.class, false));

            return systemInfoDownload.flatMapObservable(systemInfoProgress ->
                    Observable.create(emitter -> {
                        DataValueSet dataValueSet = new DataValueSet(pendingValues);
                        DataValueImportSummary importSummary = apiCallExecutor.executeObjectCall(
                                dataValueService.postDataValues(dataValueSet));
                        // Persist the server's verdict back into the local store.
                        new DataValueImportHandler(dataValueStore)
                                .handleImportSummary(dataValueSet, importSummary);
                        emitter.onNext(progressManager.increaseProgress(DataValue.class, true));
                        emitter.onComplete();
                    }));
        });
    }

    /** Adds every stored data value in the given sync state to {@code target}. */
    private void appendValuesWithState(Collection<DataValue> target, State state) {
        target.addAll(dataValueStore.getDataValuesWithState(state));
    }
}
package cruise.umple.implementation;

import org.junit.*;

/**
 * Verifies generated output for the RequirementFullCheckWithComments model:
 * each test compares one class against its expected per-language template.
 */
public class RequirementFullCheckWithCommentsTest extends TemplateTest
{
  // Shared model file exercised by every test in this class.
  private static final String MODEL = "RequirementFullCheckWithComments.ump";

  @Test
  public void X()
  {
    assertGeneratedClass("X");
  }

  @Test
  public void Y()
  {
    assertGeneratedClass("Y");
  }

  // Compares the named class's generated code against the language-specific
  // expected file RequirementFullCheckWithComments_<name>.<lang>.txt.
  private void assertGeneratedClass(String className)
  {
    assertUmpleTemplateFor(MODEL,
      languagePath + "/RequirementFullCheckWithComments_" + className + "." + languagePath + ".txt",
      className);
  }
}
package com.ctrip.platform.dal.dao.client;

import com.ctrip.platform.dal.dao.configure.*;
import com.ctrip.platform.dal.dao.task.DalRequestExecutor;
import com.ctrip.platform.dal.dao.task.DalRequestExecutorUtils;
import com.ctrip.platform.dal.dao.task.DalThreadPoolExecutor;
import org.junit.Assert;
import org.junit.Test;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.*;

/**
 * Tests for {@link DalConfigureFactory}: database-set name resolution is
 * case-sensitive, and cluster-backed sets resolve to the expected cluster.
 */
public class DalConfigureFactoryTest {

    @Test
    public void testLoad() {
        // Test load from dal.xml
        DalConfigure configure = loadDefaultConfigure();

        // Lookups succeed only with the exact configured casing.
        assertClusterSet(configure, "clusterName1", "clusterName1");
        assertLookupFails(configure, "clusterName1".toLowerCase());
        assertLookupFails(configure, "clusterName2");

        assertClusterSet(configure, "DbSetName", "clusterName2");
        assertLookupFails(configure, "DbSetName".toLowerCase());
    }

    /** Loads the default dal.xml, failing the test if loading throws. */
    private DalConfigure loadDefaultConfigure() {
        try {
            DalConfigure configure = DalConfigureFactory.load();
            assertNotNull(configure);
            return configure;
        } catch (Exception e) {
            e.printStackTrace();
            fail();
            return null; // unreachable: fail() always throws
        }
    }

    /** Asserts the named set is cluster-backed and resolves to the given cluster. */
    private void assertClusterSet(DalConfigure configure, String setName, String expectedCluster) {
        DatabaseSet databaseSet = configure.getDatabaseSet(setName);
        assertTrue(databaseSet instanceof ClusterDatabaseSet);
        assertEquals(expectedCluster.toLowerCase(),
                ((ClusterDatabaseSet) databaseSet).getCluster().getClusterName());
    }

    /** Asserts that looking up the given set name throws (logged for diagnosis). */
    private void assertLookupFails(DalConfigure configure, String setName) {
        try {
            configure.getDatabaseSet(setName);
            fail();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // @Test
    public void testThreadPoolConfig() throws Exception {
        // Disabled (annotation commented out in the original); exercises
        // per-shard thread limits loaded from dal-thread-pool-test.xml.
        DalRequestExecutor.shutdown();

        DalConfigure configure = DalConfigureFactory.load(
                Thread.currentThread().getContextClassLoader().getResource("dal-thread-pool-test.xml"));
        DalRequestExecutor.init(configure);

        ExecutorService executor = DalRequestExecutorUtils.getExecutor();
        Assert.assertTrue(executor instanceof DalThreadPoolExecutor);

        DalThreadPoolExecutorConfig executorConfig =
                DalRequestExecutorUtils.getExecutorConfig((DalThreadPoolExecutor) executor);
        Assert.assertEquals(5, executorConfig.getMaxThreadsPerShard("dao_test_mod_mysql"));
        Assert.assertEquals(0, executorConfig.getMaxThreadsPerShard("clusterName1"));
        Assert.assertEquals(0, executorConfig.getMaxThreadsPerShard("clusterName2"));
        Assert.assertEquals(3, executorConfig.getMaxThreadsPerShard("DbSetName"));

        Assert.assertFalse(((DalThreadPoolExecutor) executor).allowsCoreThreadTimeOut());
        Assert.assertEquals(1000, ((DalThreadPoolExecutor) executor).getQueue().remainingCapacity());

        DalRequestExecutor.shutdown();
    }
}
package io.digdag.standards.operator.aws; import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.BasicSessionCredentials; import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce; import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient; import com.amazonaws.services.elasticmapreduce.model.AddJobFlowStepsRequest; import com.amazonaws.services.elasticmapreduce.model.AddJobFlowStepsResult; import com.amazonaws.services.elasticmapreduce.model.Application; import com.amazonaws.services.elasticmapreduce.model.BootstrapActionConfig; import com.amazonaws.services.elasticmapreduce.model.ClusterStatus; import com.amazonaws.services.elasticmapreduce.model.Configuration; import com.amazonaws.services.elasticmapreduce.model.DescribeClusterRequest; import com.amazonaws.services.elasticmapreduce.model.DescribeClusterResult; import com.amazonaws.services.elasticmapreduce.model.DescribeStepRequest; import com.amazonaws.services.elasticmapreduce.model.EbsBlockDeviceConfig; import com.amazonaws.services.elasticmapreduce.model.EbsConfiguration; import com.amazonaws.services.elasticmapreduce.model.FailureDetails; import com.amazonaws.services.elasticmapreduce.model.InstanceGroupConfig; import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig; import com.amazonaws.services.elasticmapreduce.model.ListStepsRequest; import com.amazonaws.services.elasticmapreduce.model.ListStepsResult; import com.amazonaws.services.elasticmapreduce.model.PlacementType; import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest; import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult; import com.amazonaws.services.elasticmapreduce.model.ScriptBootstrapActionConfig; import com.amazonaws.services.elasticmapreduce.model.Step; import com.amazonaws.services.elasticmapreduce.model.StepConfig; 
import com.amazonaws.services.elasticmapreduce.model.StepStateChangeReason; import com.amazonaws.services.elasticmapreduce.model.StepStatus; import com.amazonaws.services.elasticmapreduce.model.StepSummary; import com.amazonaws.services.elasticmapreduce.model.Tag; import com.amazonaws.services.elasticmapreduce.model.VolumeSpecification; import com.amazonaws.services.elasticmapreduce.util.StepFactory; import com.amazonaws.services.kms.AWSKMSClient; import com.amazonaws.services.kms.model.EncryptRequest; import com.amazonaws.services.kms.model.EncryptResult; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.AmazonS3URI; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.ListObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.transfer.TransferManager; import com.amazonaws.services.s3.transfer.Upload; import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClient; import com.amazonaws.services.securitytoken.model.AssumeRoleRequest; import com.amazonaws.services.securitytoken.model.AssumeRoleResult; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.google.api.client.repackaged.com.google.common.base.Splitter; import com.google.api.client.repackaged.com.google.common.base.Throwables; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import 
com.google.common.collect.Lists; import com.google.common.io.BaseEncoding; import com.google.common.io.Resources; import com.google.inject.Inject; import io.digdag.client.config.Config; import io.digdag.client.config.ConfigElement; import io.digdag.client.config.ConfigException; import io.digdag.client.config.ConfigFactory; import io.digdag.client.config.ConfigKey; import io.digdag.core.Environment; import io.digdag.spi.ImmutableTaskResult; import io.digdag.spi.Operator; import io.digdag.spi.OperatorFactory; import io.digdag.spi.SecretProvider; import io.digdag.spi.TaskExecutionContext; import io.digdag.spi.TaskExecutionException; import io.digdag.spi.TaskRequest; import io.digdag.spi.TaskResult; import io.digdag.spi.TemplateEngine; import io.digdag.spi.TemplateException; import io.digdag.standards.operator.DurationInterval; import io.digdag.standards.operator.state.TaskState; import io.digdag.util.BaseOperator; import io.digdag.util.RetryExecutor; import io.digdag.util.Workspace; import org.immutables.value.Value; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.URL; import java.nio.ByteBuffer; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ThreadLocalRandom; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.google.common.io.Closeables.closeQuietly; import static io.digdag.standards.operator.aws.EmrOperatorFactory.FileReference.Type.DIRECT; import static io.digdag.standards.operator.aws.EmrOperatorFactory.FileReference.Type.LOCAL; import static 
io.digdag.standards.operator.aws.EmrOperatorFactory.FileReference.Type.S3; import static io.digdag.standards.operator.state.PollingRetryExecutor.pollingRetryExecutor; import static io.digdag.standards.operator.state.PollingWaiter.pollingWaiter; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Arrays.asList; import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toMap; public class EmrOperatorFactory implements OperatorFactory { private static final int LIST_STEPS_MAX_IDS = 10; private static final String LOCAL_STAGING_DIR = "/home/hadoop/digdag-staging"; private static Logger logger = LoggerFactory.getLogger(EmrOperatorFactory.class); public String getType() { return "emr"; } private final TemplateEngine templateEngine; private final ObjectMapper objectMapper; private final ConfigFactory cf; private final Map<String, String> environment; @Inject public EmrOperatorFactory(TemplateEngine templateEngine, ObjectMapper objectMapper, ConfigFactory cf, @Environment Map<String, String> environment) { this.templateEngine = templateEngine; this.objectMapper = objectMapper; this.cf = cf; this.environment = environment; } @Override public Operator newOperator(Path projectPath, TaskRequest request) { return new EmrOperator(projectPath, request); } private class EmrOperator extends BaseOperator { private final TaskState state; private final Config params; private final String defaultActionOnFailure; EmrOperator(Path projectPath, TaskRequest request) { super(projectPath, request); this.state = TaskState.of(request); this.params = request.getConfig() .mergeDefault(request.getConfig().getNestedOrGetEmpty("aws").getNestedOrGetEmpty("emr")) .mergeDefault(request.getConfig().getNestedOrGetEmpty("aws")); this.defaultActionOnFailure = params.get("action_on_failure", String.class, "CANCEL_AND_WAIT"); } @Override public List<String> secretSelectors() { // TODO: extract secret references from step configurations return 
ImmutableList.of("*"); } @Override public TaskResult run(TaskExecutionContext ctx) { String tag = state.constant("tag", String.class, EmrOperatorFactory::randomTag); AWSCredentials credentials = credentials(tag, ctx); SecretProvider awsSecrets = ctx.secrets().getSecrets("aws"); Optional<String> emrEndpoint = awsSecrets.getSecretOptional("emr.endpoint"); Optional<String> s3Endpoint = awsSecrets.getSecretOptional("s3.endpoint"); Optional<String> kmsEndpoint = awsSecrets.getSecretOptional("kms.endpoint"); ClientConfiguration emrClientConfiguration = new ClientConfiguration(); ClientConfiguration s3ClientConfiguration = new ClientConfiguration(); ClientConfiguration kmsClientConfiguration = new ClientConfiguration(); Aws.configureProxy(emrClientConfiguration, emrEndpoint, environment); Aws.configureProxy(s3ClientConfiguration, s3Endpoint, environment); Aws.configureProxy(kmsClientConfiguration, kmsEndpoint, environment); AmazonElasticMapReduce emr = new AmazonElasticMapReduceClient(credentials, emrClientConfiguration); AmazonS3Client s3 = new AmazonS3Client(credentials, s3ClientConfiguration); AWSKMSClient kms = new AWSKMSClient(credentials, kmsClientConfiguration); // Set up file stager Optional<AmazonS3URI> staging = params.getOptional("staging", String.class).transform(s -> { try { return new AmazonS3URI(s); } catch (IllegalArgumentException ex) { throw new ConfigException("Invalid staging uri: '" + s + "'", ex); } }); Filer filer = new Filer(s3, staging, workspace, templateEngine, params); // TODO: make it possible for operators to _reliably_ clean up boolean cleanup = false; try { TaskResult result = run(tag, emr, s3, kms, ctx, filer); cleanup = true; return result; } catch (Throwable t) { boolean retry = t instanceof TaskExecutionException && ((TaskExecutionException) t).getRetryInterval().isPresent(); cleanup = !retry; throw Throwables.propagate(t); } finally { if (cleanup) { // Best effort clean up of staging try { filer.tryCleanup(); } catch (Throwable t) { 
logger.warn("Failed to clean up staging: {}", staging, t);
                    }
                }
                s3.shutdown();
                emr.shutdown();
            }
        }

        /**
         * Resolves AWS credentials from task secrets, optionally assuming an IAM
         * role via STS when "role_arn" is configured.
         */
        private AWSCredentials credentials(String tag, TaskExecutionContext ctx)
        {
            SecretProvider awsSecrets = ctx.secrets().getSecrets("aws");
            SecretProvider emrSecrets = awsSecrets.getSecrets("emr");

            // "aws.emr.*" secrets take precedence over the generic "aws.*" secrets.
            String accessKeyId = emrSecrets.getSecretOptional("access_key_id")
                    .or(() -> awsSecrets.getSecret("access_key_id"));
            String secretAccessKey = emrSecrets.getSecretOptional("secret_access_key")
                    .or(() -> awsSecrets.getSecret("secret_access_key"));
            AWSCredentials credentials = new BasicAWSCredentials(accessKeyId, secretAccessKey);

            Optional<String> roleArn = emrSecrets.getSecretOptional("role_arn")
                    .or(awsSecrets.getSecretOptional("role_arn"));
            if (!roleArn.isPresent()) {
                return credentials;
            }

            // use STS to assume role
            String roleSessionName = emrSecrets.getSecretOptional("role_session_name")
                    .or(awsSecrets.getSecretOptional("role_session_name"))
                    .or("digdag-emr-" + tag);
            AWSSecurityTokenServiceClient stsClient = new AWSSecurityTokenServiceClient(credentials);
            AssumeRoleResult assumeResult = stsClient.assumeRole(new AssumeRoleRequest()
                    .withRoleArn(roleArn.get())
                    .withDurationSeconds(3600)
                    .withRoleSessionName(roleSessionName));
            return new BasicSessionCredentials(
                    assumeResult.getCredentials().getAccessKeyId(),
                    assumeResult.getCredentials().getSecretAccessKey(),
                    assumeResult.getCredentials().getSessionToken());
        }

        /**
         * Compiles the configured steps and submits them, either to an existing
         * cluster (when "cluster" is a plain ID) or to a newly created one (when
         * "cluster" is a nested config), then waits for the steps to finish.
         */
        private TaskResult run(String tag, AmazonElasticMapReduce emr, AmazonS3Client s3, AWSKMSClient kms, TaskExecutionContext ctx, Filer filer)
                throws IOException
        {
            ParameterCompiler parameterCompiler = new ParameterCompiler(kms, ctx, cf);

            // Set up step compiler
            List<Config> steps = params.getListOrEmpty("steps", Config.class);
            StepCompiler stepCompiler = new StepCompiler(tag, steps, filer, parameterCompiler, objectMapper, defaultActionOnFailure);

            // Set up job submitter
            Submitter submitter;
            Config cluster = null;
            try {
                cluster = params.parseNestedOrGetEmpty("cluster");
            }
            catch
(ConfigException ignore) {
                // "cluster" is not a nested config; treated below as an existing cluster ID.
            }
            if (cluster != null) {
                // Create a new cluster
                submitter = newClusterSubmitter(emr, tag, stepCompiler, cluster, filer, parameterCompiler);
            }
            else {
                // Cluster ID? Use existing cluster.
                String clusterId = params.get("cluster", String.class);
                submitter = existingClusterSubmitter(emr, tag, stepCompiler, clusterId, filer);
            }

            // Submit EMR job
            SubmissionResult submission = submitter.submit();

            // Wait for the steps to finish running
            if (!steps.isEmpty()) {
                waitForSteps(emr, submission);
            }

            return result(submission);
        }

        /**
         * Polls EMR until the last submitted step reaches a terminal state.
         */
        private void waitForSteps(AmazonElasticMapReduce emr, SubmissionResult submission)
        {
            String lastStepId = Iterables.getLast(submission.stepIds());
            pollingWaiter(state, "result")
                    .withWaitMessage("EMR job still running: %s", submission.clusterId())
                    .withPollInterval(DurationInterval.of(Duration.ofSeconds(15), Duration.ofMinutes(5)))
                    .awaitOnce(Step.class, pollState -> checkStepCompletion(emr, submission, lastStepId, pollState));
        }

        /**
         * Checks the state of the last submitted step: absent while still running,
         * the step itself when completed, and throws when a step failed or was cancelled.
         */
        private Optional<Step> checkStepCompletion(AmazonElasticMapReduce emr, SubmissionResult submission, String lastStepId, TaskState pollState)
        {
            return pollingRetryExecutor(pollState, "poll")
                    .retryUnless(AmazonServiceException.class, Aws::isDeterministicException)
                    .withRetryInterval(DurationInterval.of(Duration.ofSeconds(15), Duration.ofMinutes(5)))
                    .run(s -> {
                        // Log which step is currently running, for progress visibility.
                        ListStepsResult runningSteps = emr.listSteps(new ListStepsRequest()
                                .withClusterId(submission.clusterId())
                                .withStepStates("RUNNING"));
                        runningSteps.getSteps().stream().findFirst()
                                .ifPresent(step -> {
                                    int stepIndex = submission.stepIds().indexOf(step.getId());
                                    logger.info("Currently running EMR step {}/{}: {}: {}",
                                            stepIndex == -1 ? "?"
: Integer.toString(stepIndex + 1),
                                            submission.stepIds().size(), step.getId(), step.getName());
                                });

                        Step lastStep = emr.describeStep(new DescribeStepRequest()
                                .withClusterId(submission.clusterId())
                                .withStepId(lastStepId))
                                .getStep();

                        String stepState = lastStep.getStatus().getState();

                        switch (stepState) {
                            case "PENDING":
                            case "RUNNING":
                                // Not done yet; keep polling.
                                return Optional.absent();

                            case "CANCEL_PENDING":
                            case "CANCELLED":
                            case "FAILED":
                            case "INTERRUPTED":
                                // TODO: consider task done if action_on_failure == CONTINUE?
                                // TODO: include & log failure information

                                // List all submitted steps (in batches of LIST_STEPS_MAX_IDS)
                                // so each one's failure details can be logged below.
                                List<StepSummary> steps = Lists.partition(submission.stepIds(), LIST_STEPS_MAX_IDS).stream()
                                        .flatMap(ids -> emr.listSteps(new ListStepsRequest()
                                                .withClusterId(submission.clusterId())
                                                .withStepIds(ids)).getSteps().stream())
                                        .collect(toList());

                                logger.error("EMR job failed: {}", submission.clusterId());
                                for (StepSummary step : steps) {
                                    StepStatus status = step.getStatus();
                                    FailureDetails details = status.getFailureDetails();
                                    StepStateChangeReason reason = status.getStateChangeReason();
                                    int stepIndex = submission.stepIds().indexOf(step.getId());
                                    logger.error("EMR step {}/{}: {}: state: {}, reason: {}, details: {}",
                                            stepIndex == -1 ? "?" : Integer.toString(stepIndex + 1),
                                            submission.stepIds().size(), step.getId(),
                                            status.getState(), reason != null ? reason : "{}", details != null ?
details : "{}");
                                }
                                throw new TaskExecutionException("EMR job failed", ConfigElement.empty());

                            case "COMPLETED":
                                logger.info("EMR steps done");
                                return Optional.of(lastStep);

                            default:
                                throw new RuntimeException("Unknown step status: " + lastStep);
                        }
                    });
        }

        /**
         * Builds the task result. For a newly created cluster, exports
         * "emr.last_cluster_id" so subsequent tasks can reuse the cluster.
         */
        private TaskResult result(SubmissionResult submission)
        {
            ImmutableTaskResult.Builder result = TaskResult.defaultBuilder(request);
            if (submission.newCluster()) {
                Config storeParams = request.getConfig().getFactory().create();
                storeParams.getNestedOrSetEmpty("emr").set("last_cluster_id", submission.clusterId());
                result.storeParams(storeParams);
                result.addResetStoreParams(ConfigKey.of("emr", "last_cluster_id"));
            }
            return result.build();
        }

        /**
         * Returns a submitter that adds the compiled steps to an already running
         * cluster. Submission happens at most once; the resulting step IDs are
         * persisted in task state so retries do not resubmit.
         */
        private Submitter existingClusterSubmitter(AmazonElasticMapReduce emr, String tag, StepCompiler stepCompiler, String clusterId, Filer filer)
        {
            return () -> {
                List<String> stepIds = pollingRetryExecutor(state, "submission")
                        .retryUnless(AmazonServiceException.class, Aws::isDeterministicException)
                        .withRetryInterval(DurationInterval.of(Duration.ofSeconds(30), Duration.ofMinutes(5)))
                        .runOnce(new TypeReference<List<String>>() {}, s -> {
                            RemoteFile runner = prepareRunner(filer, tag);

                            // Compile steps
                            stepCompiler.compile(runner);

                            // Stage files to S3
                            filer.stageFiles();

                            // NOTE(review): this local "request" shadows BaseOperator's "request" field.
                            AddJobFlowStepsRequest request = new AddJobFlowStepsRequest()
                                    .withJobFlowId(clusterId)
                                    .withSteps(stepCompiler.stepConfigs());

                            int steps = request.getSteps().size();
                            logger.info("Submitting {} EMR step(s) to {}", steps, clusterId);
                            AddJobFlowStepsResult result = emr.addJobFlowSteps(request);
                            logSubmittedSteps(clusterId, steps, i -> request.getSteps().get(i).getName(), i -> result.getStepIds().get(i));
                            return ImmutableList.copyOf(result.getStepIds());
                        });
                return SubmissionResult.ofExistingCluster(clusterId, stepIds);
            };
        }

        /**
         * Logs the name and ID of each of the {@code n} submitted steps.
         */
        private void logSubmittedSteps(String clusterId, int n, Function<Integer, String> names, Function<Integer, String> ids)
        {
            logger.info("Submitted {} EMR step(s) to {}", n, clusterId);
            for (int i = 0; i <
n; i++) {
                logger.info("Step {}/{}: {}: {}", i + 1, n, names.apply(i), ids.apply(i));
            }
        }

        /**
         * Returns a submitter that creates a new EMR cluster carrying the compiled
         * steps, discovers the submitted step IDs, and waits for the cluster to boot.
         */
        private Submitter newClusterSubmitter(AmazonElasticMapReduce emr, String tag, StepCompiler stepCompiler, Config clusterConfig, Filer filer, ParameterCompiler parameterCompiler)
        {
            return () -> {
                // Start cluster
                NewCluster cluster = pollingRetryExecutor(state, "submission")
                        .withRetryInterval(DurationInterval.of(Duration.ofSeconds(30), Duration.ofMinutes(5)))
                        // TODO: EMR requests are not idempotent, thus retrying might produce duplicate cluster submissions.
                        .retryUnless(AmazonServiceException.class, Aws::isDeterministicException)
                        .runOnce(NewCluster.class, s -> submitNewClusterRequest(emr, tag, stepCompiler, clusterConfig, filer, parameterCompiler));

                // Get submitted step IDs
                List<String> stepIds = pollingRetryExecutor(this.state, "steps")
                        .withRetryInterval(DurationInterval.of(Duration.ofSeconds(30), Duration.ofMinutes(5)))
                        .retryUnless(AmazonServiceException.class, Aws::isDeterministicException)
                        .runOnce(new TypeReference<List<String>>() {}, s -> {
                            List<StepSummary> steps = listSubmittedSteps(emr, tag, cluster);
                            logSubmittedSteps(cluster.id(), cluster.steps(), i -> steps.get(i).getName(), i -> steps.get(i).getId());
                            return steps.stream().map(StepSummary::getId).collect(toList());
                        });

                // Log cluster status while waiting for it to come up
                pollingWaiter(state, "bootstrap")
                        .withWaitMessage("EMR cluster still booting")
                        .withPollInterval(DurationInterval.of(Duration.ofSeconds(30), Duration.ofMinutes(5)))
                        .awaitOnce(String.class, pollState -> checkClusterBootStatus(emr, cluster, pollState));

                return SubmissionResult.ofNewCluster(cluster.id(), stepIds);
            };
        }

        /**
         * Polls the cluster state while it boots: absent while starting or
         * bootstrapping, the state string once the cluster is up or (when steps
         * were submitted) has reached a terminal state.
         */
        private Optional<String> checkClusterBootStatus(AmazonElasticMapReduce emr, NewCluster cluster, TaskState state)
        {
            // Only creating a cluster, with no steps?
boolean createOnly = cluster.steps() == 0;

            DescribeClusterResult describeClusterResult = pollingRetryExecutor(state, "describe-cluster")
                    .withRetryInterval(DurationInterval.of(Duration.ofSeconds(30), Duration.ofMinutes(5)))
                    .retryUnless(AmazonServiceException.class, Aws::isDeterministicException)
                    .run(ds -> emr.describeCluster(new DescribeClusterRequest().withClusterId(cluster.id())));

            ClusterStatus clusterStatus = describeClusterResult.getCluster().getStatus();
            String clusterState = clusterStatus.getState();

            switch (clusterState) {
                case "STARTING":
                    logger.info("EMR cluster starting: {}", cluster.id());
                    return Optional.absent();

                case "BOOTSTRAPPING":
                    logger.info("EMR cluster bootstrapping: {}", cluster.id());
                    return Optional.absent();

                case "RUNNING":
                case "WAITING":
                    logger.info("EMR cluster up: {}", cluster.id());
                    return Optional.of(clusterState);

                case "TERMINATED_WITH_ERRORS":
                    if (createOnly) {
                        // TODO: log more information about the errors
                        // TODO: inspect state change reason to figure out whether it was the boot that failed or e.g. steps submitted by another agent
                        throw new TaskExecutionException("EMR boot failed: " + cluster.id(), ConfigElement.empty());
                    }
                    return Optional.of(clusterState);

                case "TERMINATING":
                    if (createOnly) {
                        // Keep waiting for the final state
                        // TODO: inspect state change reason and bail early here
                        return Optional.absent();
                    }
                    return Optional.of(clusterState);

                case "TERMINATED":
                    return Optional.of(clusterState);

                default:
                    throw new RuntimeException("Unknown EMR cluster state: " + clusterState);
            }
        }

        /**
         * Builds and submits a RunJobFlow request for a new cluster, staging all
         * required files to S3 first. Returns the new cluster's ID and step count.
         */
        private NewCluster submitNewClusterRequest(AmazonElasticMapReduce emr, String tag, StepCompiler stepCompiler,
                Config cluster, Filer filer, ParameterCompiler parameterCompiler)
                throws IOException
        {
            RemoteFile runner = prepareRunner(filer, tag);

            // Compile steps
            stepCompiler.compile(runner);
            List<StepConfig> stepConfigs = stepCompiler.stepConfigs();

            Config ec2 = cluster.getNested("ec2");
            Config master = ec2.getNestedOrGetEmpty("master");
            List<Config> core = ec2.getOptional("core", Config.class).transform(ImmutableList::of).or(ImmutableList.of());
            List<Config> task = ec2.getListOrEmpty("task", Config.class);

            List<String> applications = cluster.getListOrEmpty("applications", String.class);
            if (applications.isEmpty()) {
                // Default application set when none is configured.
                applications = ImmutableList.of("Hadoop", "Hive", "Spark", "Flink");
            }

            // TODO: allow configuring additional application parameters
            List<Application> applicationConfigs = applications.stream()
                    .map(application -> new Application().withName(application))
                    .collect(toList());

            // TODO: merge configurations with the same classification?
List<Configuration> configurations = cluster.getListOrEmpty("configurations", JsonNode.class).stream()
                    .map(this::configurations)
                    .flatMap(Collection::stream)
                    .collect(toList());

            // Bootstrap actions are numbered starting from 1.
            List<JsonNode> bootstrap = cluster.getListOrEmpty("bootstrap", JsonNode.class);
            List<BootstrapActionConfig> bootstrapActions = new ArrayList<>();
            for (int i = 0; i < bootstrap.size(); i++) {
                bootstrapActions.add(bootstrapAction(i + 1, bootstrap.get(i), tag, filer, runner, parameterCompiler));
            }

            // Stage files to S3
            filer.stageFiles();

            Optional<String> subnetId = ec2.getOptional("subnet_id", String.class);

            String defaultMasterInstanceType;
            String defaultCoreInstanceType;
            String defaultTaskInstanceType;

            if (subnetId.isPresent()) {
                // m4 requires VPC (subnet id)
                defaultMasterInstanceType = "m4.2xlarge";
                defaultCoreInstanceType = "m4.xlarge";
                defaultTaskInstanceType = "m4.xlarge";
            }
            else {
                defaultMasterInstanceType = "m3.2xlarge";
                defaultCoreInstanceType = "m3.xlarge";
                defaultTaskInstanceType = "m3.xlarge";
            }

            RunJobFlowRequest request = new RunJobFlowRequest()
                    .withName(cluster.get("name", String.class, "Digdag") + " (" + tag + ")")
                    .withReleaseLabel(cluster.get("release", String.class, "emr-5.2.0"))
                    .withSteps(stepConfigs)
                    .withBootstrapActions(bootstrapActions)
                    .withApplications(applicationConfigs)
                    .withLogUri(cluster.get("logs", String.class, null))
                    .withJobFlowRole(cluster.get("cluster_role", String.class, "EMR_EC2_DefaultRole"))
                    .withServiceRole(cluster.get("service_role", String.class, "EMR_DefaultRole"))
                    .withTags(new Tag().withKey("DIGDAG_CLUSTER_ID").withValue(tag))
                    .withVisibleToAllUsers(cluster.get("visible", boolean.class, true))
                    .withConfigurations(configurations)
                    .withInstances(new JobFlowInstancesConfig()
                            .withInstanceGroups(ImmutableList.<InstanceGroupConfig>builder()
                                    // Master Node
                                    .add(instanceGroupConfig("Master", master, "MASTER", defaultMasterInstanceType, 1))
                                    // Core Group
                                    .addAll(instanceGroupConfigs("Core", core, "CORE", defaultCoreInstanceType))
                                    // Task Groups
.addAll(instanceGroupConfigs("Task %d", task, "TASK", defaultTaskInstanceType))
                                    .build()
                            )
                            .withAdditionalMasterSecurityGroups(ec2.getListOrEmpty("additional_master_security_groups", String.class))
                            .withAdditionalSlaveSecurityGroups(ec2.getListOrEmpty("additional_slave_security_groups", String.class))
                            .withEmrManagedMasterSecurityGroup(ec2.get("emr_managed_master_security_group", String.class, null))
                            .withEmrManagedSlaveSecurityGroup(ec2.get("emr_managed_slave_security_group", String.class, null))
                            .withServiceAccessSecurityGroup(ec2.get("service_access_security_group", String.class, null))
                            .withTerminationProtected(cluster.get("termination_protected", boolean.class, false))
                            .withPlacement(cluster.getOptional("availability_zone", String.class)
                                    .transform(zone -> new PlacementType().withAvailabilityZone(zone)).orNull())
                            .withEc2SubnetId(subnetId.orNull())
                            .withEc2KeyName(ec2.get("key", String.class))
                            .withKeepJobFlowAliveWhenNoSteps(!cluster.get("auto_terminate", boolean.class, true)));

            // NOTE(review): first message has a "steps(s)" typo; the second log call passes
            // one more argument than it has "{}" placeholders (the trailing "result" is
            // silently ignored by slf4j's parameterized logging).
            logger.info("Submitting EMR job with {} steps(s)", request.getSteps().size());
            RunJobFlowResult result = emr.runJobFlow(request);
            logger.info("Submitted EMR job with {} step(s): {}", request.getSteps().size(), result.getJobFlowId(), result);

            return NewCluster.of(result.getJobFlowId(), request.getSteps().size());
        }

        /**
         * Builds one instance group config per entry in {@code configs}; the group
         * name is {@code defaultName} formatted with the 1-based index.
         */
        private List<InstanceGroupConfig> instanceGroupConfigs(String defaultName, List<Config> configs, String role, String defaultInstanceType)
        {
            List<InstanceGroupConfig> instanceGroupConfigs = new ArrayList<>();
            for (int i = 0; i < configs.size(); i++) {
                Config config = configs.get(i);
                instanceGroupConfigs.add(instanceGroupConfig(String.format(defaultName, i + 1), config, role, defaultInstanceType));
            }
            return instanceGroupConfigs;
        }

        /**
         * Builds an instance group config using the "count" parameter (default 0).
         */
        private InstanceGroupConfig instanceGroupConfig(String defaultName, Config config, String role, String defaultInstanceType)
        {
            int instanceCount = config.get("count", int.class, 0);
            return instanceGroupConfig(defaultName, config, role,
defaultInstanceType, instanceCount);
        }

        /**
         * Builds a single instance group config from the given group configuration.
         */
        private InstanceGroupConfig instanceGroupConfig(String defaultName, Config config, String role, String defaultInstanceType, int instanceCount)
        {
            return new InstanceGroupConfig()
                    .withName(config.get("name", String.class, defaultName))
                    .withInstanceRole(role)
                    .withInstanceCount(instanceCount)
                    .withInstanceType(config.get("type", String.class, defaultInstanceType))
                    .withMarket(config.get("market", String.class, null))
                    .withBidPrice(config.get("bid_price", String.class, null))
                    .withEbsConfiguration(config.getOptional("ebs", Config.class).transform(this::ebsConfiguration).orNull())
                    .withConfigurations(config.getListOrEmpty("configurations", JsonNode.class).stream()
                            .map(this::configurations)
                            .flatMap(Collection::stream)
                            .collect(toList()));
        }

        /**
         * Builds an EBS configuration from the "ebs" group configuration.
         */
        private EbsConfiguration ebsConfiguration(Config config)
        {
            return new EbsConfiguration()
                    .withEbsOptimized(config.get("optimized", Boolean.class, null))
                    .withEbsBlockDeviceConfigs(config.getListOrEmpty("devices", Config.class).stream()
                            .map(this::ebsBlockDeviceConfig)
                            .collect(toList()));
        }

        private EbsBlockDeviceConfig ebsBlockDeviceConfig(Config config)
        {
            return new EbsBlockDeviceConfig()
                    .withVolumeSpecification(volumeSpecification(config.getNested("volume_specification")))
                    .withVolumesPerInstance(config.get("volumes_per_instance", Integer.class, null));
        }

        private VolumeSpecification volumeSpecification(Config config)
        {
            return new VolumeSpecification()
                    .withIops(config.get("iops", Integer.class, null))
                    .withSizeInGB(config.get("size_in_gb", Integer.class))
                    .withVolumeType(config.get("type", String.class));
        }

        /**
         * Lists the steps submitted by this task (identified by the tag embedded in
         * each step name), paging through the ListSteps API until all expected steps
         * have been found.
         */
        private List<StepSummary> listSubmittedSteps(AmazonElasticMapReduce emr, String tag, NewCluster cluster)
        {
            List<StepSummary> steps = new ArrayList<>();
            ListStepsRequest request = new ListStepsRequest().withClusterId(cluster.id());
            while (steps.size() < cluster.steps()) {
                ListStepsResult result = emr.listSteps(request);
                for (StepSummary step : result.getSteps()) {
                    if
(step.getName().contains(tag)) {
                        steps.add(step);
                    }
                }
                if (result.getMarker() == null) {
                    break;
                }
                request.setMarker(result.getMarker());
            }
            // The ListSteps api returns steps in reverse order. So reverse them to submission order.
            Collections.reverse(steps);
            return steps;
        }

        /**
         * Compiles a bootstrap action (either a bare script path string or a config
         * object with path/name/files/env/args) into a ScriptBootstrapAction that is
         * driven by the staged command runner.
         */
        private BootstrapActionConfig bootstrapAction(int index, JsonNode action, String tag, Filer filer, RemoteFile runner, ParameterCompiler parameterCompiler)
                throws IOException
        {
            String script;
            String name;
            FileReference reference;
            Config config;

            if (action.isTextual()) {
                // Shorthand form: just the script path.
                script = action.asText();
                reference = fileReference("bootstrap", script);
                name = reference.filename();
                config = request.getConfig().getFactory().create();
            }
            else if (action.isObject()) {
                config = request.getConfig().getFactory().create(action);
                script = config.get("path", String.class);
                reference = fileReference("bootstrap", script);
                name = config.get("name", String.class, reference.filename());
            }
            else {
                throw new ConfigException("Invalid bootstrap action: " + action);
            }

            RemoteFile file = filer.prepareRemoteFile(tag, "bootstrap", Integer.toString(index), reference, false);

            CommandRunnerConfiguration configuration = CommandRunnerConfiguration.builder()
                    .workingDirectory(bootstrapWorkingDirectory(index))
                    .env(parameterCompiler.parameters(config.getNestedOrGetEmpty("env"), "env", (key, value) -> value))
                    // 0777 (octal): the bootstrap script must be executable on the node.
                    .addDownload(DownloadConfig.of(file, 0777))
                    .addAllDownload(config.getListOrEmpty("files", String.class).stream()
                            .map(r -> fileReference("file", r))
                            .map(r -> filer.prepareRemoteFile(tag, "bootstrap", Integer.toString(index), r, false, bootstrapWorkingDirectory(index)))
                            .collect(toList()))
                    .addCommand(file.localPath())
                    .addAllCommand(parameterCompiler.parameters(config, "args"))
                    .build();

            // Serialize the runner configuration and stage it alongside the script.
            FileReference configurationFileReference = ImmutableFileReference.builder()
                    .type(FileReference.Type.DIRECT)
                    .contents(objectMapper.writeValueAsBytes(configuration))
                    .filename("config.json")
                    .build();
            RemoteFile remoteConfigurationFile =
filer.prepareRemoteFile(tag, "bootstrap", Integer.toString(index), configurationFileReference, false);

            return new BootstrapActionConfig()
                    .withName(name)
                    .withScriptBootstrapAction(new ScriptBootstrapActionConfig()
                            .withPath(runner.s3Uri().toString())
                            .withArgs(remoteConfigurationFile.s3Uri().toString()));
        }

        /**
         * Working directory on the node for the n'th bootstrap action.
         */
        private String bootstrapWorkingDirectory(int index)
        {
            return LOCAL_STAGING_DIR + "/bootstrap/" + index + "/wd";
        }

        /**
         * Parses an EMR "configurations" entry: either the name of a template file
         * containing a JSON list of configurations, or an embedded configuration object.
         */
        private List<Configuration> configurations(JsonNode node)
        {
            if (node.isTextual()) {
                // File
                String configurationJson;
                try {
                    configurationJson = workspace.templateFile(templateEngine, node.asText(), UTF_8, params);
                }
                catch (IOException | TemplateException e) {
                    throw new TaskExecutionException(e, TaskExecutionException.buildExceptionErrorConfig(e));
                }
                List<ConfigurationJson> values;
                try {
                    values = objectMapper.readValue(configurationJson, new TypeReference<List<ConfigurationJson>>() {});
                }
                catch (IOException e) {
                    throw new ConfigException("Invalid EMR configuration file: " + node.asText());
                }
                return values.stream()
                        .map(ConfigurationJson::toConfiguration)
                        .collect(toList());
            }
            else if (node.isObject()) {
                // Embedded configuration
                Config config = cf.create(node);
                return ImmutableList.of(new Configuration()
                        .withConfigurations(config.getListOrEmpty("configurations", JsonNode.class).stream()
                                .map(this::configurations)
                                .flatMap(Collection::stream)
                                .collect(toList()))
                        .withClassification(config.get("classification", String.class, null))
                        .withProperties(config.get("properties", new TypeReference<Map<String, String>>() {}, null)));
            }
            else {
                throw new ConfigException("Invalid EMR configuration: '" + node + "'");
            }
        }
    }

    /**
     * Stages local and resource files to the configured S3 staging location and
     * hands out {@code RemoteFile} handles describing where each file lives on S3
     * and where it should be downloaded to on the cluster nodes.
     */
    private static class Filer
    {
        private final AmazonS3Client s3;
        private final Optional<AmazonS3URI> staging;
        private final Workspace workspace;
        private final TemplateEngine templateEngine;
        private final Config params;

        // Files queued for upload, and the random IDs already handed out.
        private final List<StagingFile> files = new ArrayList<>();
        private final Set<String> ids = new HashSet<>();

        Filer(AmazonS3Client
s3, Optional<AmazonS3URI> staging, Workspace workspace, TemplateEngine templateEngine, Config params)
        {
            this.s3 = s3;
            this.staging = staging;
            this.workspace = workspace;
            this.templateEngine = templateEngine;
            this.params = params;
        }

        RemoteFile prepareRemoteFile(String tag, String section, String path, FileReference reference, boolean template)
        {
            return prepareRemoteFile(tag, section, path, reference, template, null);
        }

        /**
         * Computes local and S3 locations for a file and, when the reference is
         * local, queues it for upload by {@link #stageFiles()}.
         */
        RemoteFile prepareRemoteFile(String tag, String section, String path, FileReference reference, boolean template, String localDir)
        {
            // Unique id; the predicate makes randomTag retry until an unused id is produced.
            String id = randomTag(s -> !ids.add(s));
            String prefix = tag + "/" + section + "/" + path + "/" + id;

            if (localDir == null) {
                localDir = LOCAL_STAGING_DIR + "/" + prefix;
            }

            ImmutableRemoteFile.Builder builder =
                    ImmutableRemoteFile.builder()
                            .reference(reference)
                            .localPath(localDir + "/" + reference.filename());

            if (reference.local()) {
                // Local file? Then we need to upload it to S3.
                if (!staging.isPresent()) {
                    throw new ConfigException("Please configure a S3 'staging' directory");
                }
                String baseKey = staging.get().getKey();
                // NOTE(review): baseKey and prefix are concatenated without an explicit "/";
                // presumably the parsed staging key ends with "/" -- verify against the staging uri.
                String key = (baseKey != null ?
baseKey : "") + prefix + "/" + reference.filename();
                builder.s3Uri(new AmazonS3URI("s3://" + staging.get().getBucket() + "/" + key));
            }
            else {
                // Already remote: use the reference as-is.
                builder.s3Uri(new AmazonS3URI(reference.reference().get()));
            }

            RemoteFile remoteFile = builder.build();

            if (reference.local()) {
                files.add(StagingFile.of(template, remoteFile));
            }

            return remoteFile;
        }

        /**
         * Uploads all queued files to S3 in parallel via a TransferManager,
         * closing all request input streams when done.
         */
        void stageFiles()
        {
            if (files.isEmpty()) {
                return;
            }

            TransferManager transferManager = new TransferManager(s3);

            List<PutObjectRequest> requests = new ArrayList<>();

            for (StagingFile f : files) {
                logger.info("Staging {} -> {}", f.file().reference().filename(), f.file().s3Uri());
                requests.add(stagingFilePutRequest(f));
            }

            try {
                List<Upload> uploads = requests.stream()
                        .map(transferManager::upload)
                        .collect(toList());

                for (Upload upload : uploads) {
                    try {
                        upload.waitForCompletion();
                    }
                    catch (InterruptedException e) {
                        // Re-interrupt the thread and fail the task.
                        Thread.currentThread().interrupt();
                        throw new TaskExecutionException(e, TaskExecutionException.buildExceptionErrorConfig(e));
                    }
                }
            }
            finally {
                transferManager.shutdownNow(false);
                requests.forEach(r -> closeQuietly(r.getInputStream()));
            }
        }

        /**
         * Builds the S3 put request for one staged file, rendering templated local
         * files and loading resource contents into memory as needed.
         */
        private PutObjectRequest stagingFilePutRequest(StagingFile file)
        {
            AmazonS3URI uri = file.file().s3Uri();
            FileReference reference = file.file().reference();

            switch (reference.type()) {
                case LOCAL: {
                    if (file.template()) {
                        // Render the file through the template engine before upload.
                        String content;
                        try {
                            content = workspace.templateFile(templateEngine, reference.filename(), UTF_8, params);
                        }
                        catch (IOException | TemplateException e) {
                            throw new ConfigException("Failed to load file: " + file.file().reference().filename(), e);
                        }
                        byte[] bytes = content.getBytes(UTF_8);
                        ObjectMetadata metadata = new ObjectMetadata();
                        metadata.setContentLength(bytes.length);
                        return new PutObjectRequest(uri.getBucket(), uri.getKey(), new ByteArrayInputStream(bytes), metadata);
                    }
                    else {
                        return new PutObjectRequest(uri.getBucket(), uri.getKey(), workspace.getFile(reference.filename()));
                    }
                }
                case RESOURCE: {
                    byte[] bytes;
                    try {
                        bytes = Resources.toByteArray(new
URL(reference.reference().get()));
                    }
                    catch (IOException e) {
                        throw new TaskExecutionException(e, TaskExecutionException.buildExceptionErrorConfig(e));
                    }
                    ObjectMetadata metadata = new ObjectMetadata();
                    metadata.setContentLength(bytes.length);
                    return new PutObjectRequest(uri.getBucket(), uri.getKey(), new ByteArrayInputStream(bytes), metadata);
                }
                case DIRECT:
                    // Contents are held in memory on the file reference itself.
                    byte[] bytes = reference.contents().get();
                    ObjectMetadata metadata = new ObjectMetadata();
                    metadata.setContentLength(bytes.length);
                    return new PutObjectRequest(uri.getBucket(), uri.getKey(), new ByteArrayInputStream(bytes), metadata);
                case S3:
                default:
                    // S3 references are never queued for staging; anything else is a programming error.
                    throw new AssertionError();
            }
        }

        /**
         * Best-effort removal of everything under the staging prefix. Deletion is
         * retried a few times; failures are logged, never thrown.
         */
        void tryCleanup()
        {
            if (!staging.isPresent()) {
                return;
            }
            String bucket = staging.get().getBucket();
            ListObjectsRequest req = new ListObjectsRequest()
                    .withBucketName(bucket)
                    .withPrefix(staging.get().getKey());
            do {
                ObjectListing res = s3.listObjects(req);
                String[] keys = res.getObjectSummaries().stream()
                        .map(S3ObjectSummary::getKey)
                        .toArray(String[]::new);
                for (String key : keys) {
                    logger.info("Removing s3://{}/{}", bucket, key);
                }
                try {
                    RetryExecutor.retryExecutor()
                            .withRetryLimit(3)
                            .withInitialRetryWait(100)
                            .retryIf(e -> !(e instanceof AmazonServiceException) || !Aws.isDeterministicException((AmazonServiceException) e))
                            .run(() -> s3.deleteObjects(new DeleteObjectsRequest(bucket).withKeys(keys)));
                }
                catch (RetryExecutor.RetryGiveupException e) {
                    logger.info("Failed to delete staging files in {}", staging.get(), e.getCause());
                }
                req.setMarker(res.getMarker());
            }
            while (req.getMarker() != null);
        }
    }

    /**
     * Translates the configured step list into EMR StepConfigs that invoke the
     * staged command runner on the cluster.
     */
    private static class StepCompiler
    {
        private final ParameterCompiler pc;
        private final String tag;
        private final List<Config> steps;
        private final Filer filer;
        private final ObjectMapper objectMapper;
        private final String defaultActionOnFailure;

        // Compilation state: accumulated configs, current 1-based step index,
        // current step config, and the staged runner script.
        private List<StepConfig> configs;
        private int index;
        private Config step;
        private RemoteFile runner;

        StepCompiler(String tag, List<Config> steps, Filer filer, ParameterCompiler pc,
ObjectMapper objectMapper, String defaultActionOnFailure)
        {
            this.tag = Preconditions.checkNotNull(tag, "tag");
            this.steps = Preconditions.checkNotNull(steps, "steps");
            this.filer = Preconditions.checkNotNull(filer, "filer");
            this.pc = Preconditions.checkNotNull(pc, "pc");
            this.objectMapper = Preconditions.checkNotNull(objectMapper, "objectMapper");
            this.defaultActionOnFailure = Preconditions.checkNotNull(defaultActionOnFailure, "defaultActionOnFailure");
            Preconditions.checkArgument(!steps.isEmpty(), "steps");
        }

        /**
         * Compiles every configured step, dispatching on its "type" parameter.
         */
        private void compile(RemoteFile runner)
                throws IOException
        {
            this.runner = runner;
            configs = new ArrayList<>();
            index = 1;
            for (int i = 0; i < steps.size(); i++, index++) {
                step = steps.get(i);
                String type = step.get("type", String.class);
                switch (type) {
                    case "flink":
                        flinkStep();
                        break;
                    case "hive":
                        hiveStep();
                        break;
                    case "spark":
                        sparkStep();
                        break;
                    case "spark-sql":
                        sparkSqlStep();
                        break;
                    case "script":
                        scriptStep();
                        break;
                    case "command":
                        commandStep();
                        break;
                    default:
                        throw new ConfigException("Unsupported step type: " + type);
                }
            }
        }

        List<StepConfig> stepConfigs()
        {
            // compile() must have been called first.
            Preconditions.checkState(configs != null);
            return configs;
        }

        /**
         * Working directory on the node for the current step.
         */
        private String localWorkingDirectory()
        {
            return LOCAL_STAGING_DIR + "/" + tag + "/steps/" + index + "/wd";
        }

        private RemoteFile prepareRemoteFile(FileReference reference, boolean template, String localDir)
        {
            return filer.prepareRemoteFile(tag, "steps", Integer.toString(index), reference, template, localDir);
        }

        private RemoteFile prepareRemoteFile(FileReference reference, boolean template)
        {
            return filer.prepareRemoteFile(tag, "steps", Integer.toString(index), reference, template);
        }

        /**
         * Compiles a "spark" step: spark-submit for jar/python applications,
         * spark-shell for scala scripts.
         */
        private void sparkStep()
                throws IOException
        {
            FileReference applicationReference = fileReference("application", step);
            boolean scala = applicationReference.filename().endsWith(".scala");
            boolean python = applicationReference.filename().endsWith(".py");
            boolean script = scala || python;
            RemoteFile applicationFile =
prepareRemoteFile(applicationReference, script);

            List<String> files = step.getListOrEmpty("files", String.class);
            List<RemoteFile> filesFiles = files.stream()
                    .map(r -> fileReference("file", r))
                    .map(r -> prepareRemoteFile(r, false, localWorkingDirectory()))
                    .collect(toList());

            List<String> filesArgs = filesFiles.isEmpty()
                    ? ImmutableList.of()
                    : ImmutableList.of("--files", filesFiles.stream().map(RemoteFile::localPath).collect(Collectors.joining(",")));

            List<String> jars = step.getListOrEmpty("jars", String.class);
            List<RemoteFile> jarFiles = jars.stream()
                    .map(r -> fileReference("jar", r))
                    .map(r -> prepareRemoteFile(r, false))
                    .collect(toList());

            List<String> jarArgs = jarFiles.isEmpty()
                    ? ImmutableList.of()
                    : ImmutableList.of("--jars", jarFiles.stream().map(RemoteFile::localPath).collect(Collectors.joining(",")));

            Config conf = step.getNestedOrderedOrGetEmpty("conf");
            List<Parameter> confArgs = pc.parameters("--conf", conf, "conf", (key, value) -> key + "=" + value);

            List<String> classArgs = step.getOptional("class", String.class)
                    .transform(s -> ImmutableList.of("--class", s))
                    .or(ImmutableList.of());

            String name;
            if (scala) {
                name = "Spark Shell Script";
            }
            else if (python) {
                name = "Spark Py Script";
            }
            else {
                name = "Spark Application";
            }

            CommandRunnerConfiguration.Builder configuration = CommandRunnerConfiguration.builder()
                    .workingDirectory(localWorkingDirectory());

            configuration.addDownload(applicationFile);
            configuration.addAllDownload(jarFiles);
            configuration.addAllDownload(filesFiles);

            String command;
            List<String> applicationArgs;
            String deployMode;
            List<String> args = step.getListOrEmpty("args", String.class);
            if (scala) {
                // spark-shell needs the script to explicitly exit, otherwise it will wait forever for user input.
                // Fortunately spark-shell accepts multiple scripts on the command line, so we append a helper script to run last and exit the shell.
// This could also have been accomplished by wrapping the spark-shell invocation in a bash session that concatenates the exit command onto the user script using // anonymous fifo's etc but that seems a bit more brittle. Also, this way the actual names of the scripts appear in logs instead of /dev/fd/47 etc. String exitHelperFilename = "exit-helper.scala"; URL exitHelperResource = Resources.getResource(EmrOperatorFactory.class, exitHelperFilename); FileReference exitHelperFileReference = ImmutableFileReference.builder() .reference(exitHelperResource.toString()) .type(FileReference.Type.RESOURCE) .filename(exitHelperFilename) .build(); RemoteFile exitHelperFile = prepareRemoteFile(exitHelperFileReference, false); configuration.addDownload(exitHelperFile); command = "spark-shell"; applicationArgs = ImmutableList.of("-i", applicationFile.localPath(), exitHelperFile.localPath()); String requiredDeployMode = "client"; deployMode = step.get("deploy_mode", String.class, requiredDeployMode); if (!deployMode.equals(requiredDeployMode)) { throw new ConfigException("Only '" + requiredDeployMode + "' deploy_mode is supported for Spark shell scala scripts, got: '" + deployMode + "'"); } if (!args.isEmpty()) { throw new ConfigException("The 'args' parameter is not supported for Spark shell scala scripts, got: " + args); } } else { command = "spark-submit"; applicationArgs = ImmutableList.<String>builder() .add(applicationFile.localPath()) .addAll(args) .build(); deployMode = step.get("deploy_mode", String.class, "cluster"); } configuration.addAllCommand(command, "--deploy-mode", deployMode); configuration.addAllCommand(step.getListOrEmpty("submit_options", String.class)); configuration.addAllCommand(jarArgs); configuration.addAllCommand(filesArgs); configuration.addAllCommand(confArgs); configuration.addAllCommand(classArgs); configuration.addAllCommand(applicationArgs); addStep(name, configuration.build()); } private void sparkSqlStep() throws IOException { FileReference 
wrapperFileReference = FileReference.ofResource("spark-sql-wrapper.py"); RemoteFile wrapperFile = prepareRemoteFile(wrapperFileReference, false); FileReference queryReference = fileReference("query", step); RemoteFile queryFile = prepareRemoteFile(queryReference, true); List<String> jars = step.getListOrEmpty("jars", String.class); List<RemoteFile> jarFiles = jars.stream() .map(r -> fileReference("jar", r)) .map(r -> prepareRemoteFile(r, false)) .collect(toList()); List<String> jarArgs = jarFiles.isEmpty() ? ImmutableList.of() : ImmutableList.of("--jars", jarFiles.stream().map(RemoteFile::localPath).collect(Collectors.joining(","))); Config conf = step.getNestedOrderedOrGetEmpty("conf"); List<Parameter> confArgs = pc.parameters("--conf", conf, "conf", (key, value) -> key + "=" + value); CommandRunnerConfiguration configuration = CommandRunnerConfiguration.builder() .workingDirectory(localWorkingDirectory()) .addDownload(wrapperFile) .addDownload(queryFile) .addAllCommand("spark-submit") .addAllCommand("--deploy-mode", step.get("deploy_mode", String.class, "cluster")) .addAllCommand("--files", queryFile.localPath()) .addAllCommand(confArgs) .addAllCommand(pc.parameters(step, "submit_options")) .addAllCommand(jarArgs) .addAllCommand(wrapperFile.localPath()) .addAllCommand(queryReference.filename(), step.get("result", String.class)) .build(); addStep("Spark Sql", configuration); } private void scriptStep() throws IOException { FileReference scriptReference = fileReference("script", step); RemoteFile scriptFile = prepareRemoteFile(scriptReference, false); List<String> files = step.getListOrEmpty("files", String.class); List<RemoteFile> filesFiles = files.stream() .map(r -> fileReference("file", r)) .map(r -> prepareRemoteFile(r, false, localWorkingDirectory())) .collect(toList()); CommandRunnerConfiguration configuration = CommandRunnerConfiguration.builder() .workingDirectory(localWorkingDirectory()) .env(pc.parameters(step.getNestedOrGetEmpty("env"), "env", (key, 
value) -> value)) .addDownload(DownloadConfig.of(scriptFile, 0777)) .addAllDownload(filesFiles) .addAllCommand(scriptFile.localPath()) .addAllCommand(pc.parameters(step, "args")) .build(); addStep("Script", configuration); } private void flinkStep() throws IOException { String name = "Flink Application"; FileReference fileReference = fileReference("application", step); RemoteFile remoteFile = prepareRemoteFile(fileReference, false); CommandRunnerConfiguration configuration = CommandRunnerConfiguration.builder() .workingDirectory(localWorkingDirectory()) .addDownload(remoteFile) .addAllCommand( "flink", "run", "-m", "yarn-cluster", "-yn", Integer.toString(step.get("yarn_containers", int.class, 2)), remoteFile.localPath()) .addAllCommand(pc.parameters(step, "args")) .build(); addStep(name, configuration); } private void hiveStep() throws IOException { FileReference scriptReference = fileReference("script", step); RemoteFile remoteScript = prepareRemoteFile(scriptReference, false); CommandRunnerConfiguration configuration = CommandRunnerConfiguration.builder() .workingDirectory(localWorkingDirectory()) .addAllCommand("hive-script", "--run-hive-script", "--args", "-f", remoteScript.s3Uri().toString()) .addAllCommand(pc.parameters("-d", step.getNestedOrGetEmpty("vars"), "vars", (key, value) -> key + "=" + value)) .addAllCommand(pc.parameters("-hiveconf", step.getNestedOrGetEmpty("hiveconf"), "hiveconf", (key, value) -> key + "=" + value)) .build(); addStep("Hive Script", configuration); } private void commandStep() throws IOException { CommandRunnerConfiguration configuration = CommandRunnerConfiguration.builder() .workingDirectory(localWorkingDirectory()) .env(pc.parameters(step.getNestedOrGetEmpty("env"), "env", (key, value) -> value)) .addAllDownload(step.getListOrEmpty("files", String.class).stream() .map(r -> fileReference("file", r)) .map(r -> prepareRemoteFile(r, false, localWorkingDirectory())) .collect(toList())) .addAllCommand(step.get("command", 
String.class)) .addAllCommand(pc.parameters(step, "args")) .build(); addStep("Command", configuration); } private void addStep(String name, CommandRunnerConfiguration configuration) throws IOException { FileReference configurationFileReference = ImmutableFileReference.builder() .type(FileReference.Type.DIRECT) .contents(objectMapper.writeValueAsBytes(configuration)) .filename("config.json") .build(); RemoteFile remoteConfigurationFile = prepareRemoteFile(configurationFileReference, false); StepConfig runStep = stepConfig(name, tag, step) .withHadoopJarStep(stepFactory().newScriptRunnerStep(runner.s3Uri().toString(), remoteConfigurationFile.s3Uri().toString())); configs.add(runStep); } private StepFactory stepFactory() { // TODO: configure region return new StepFactory(); } private StepConfig stepConfig(String defaultName, String tag, Config step) { String name = step.get("name", String.class, defaultName); return new StepConfig() .withName(name + " (" + tag + ")") // TERMINATE_JOB_FLOW | TERMINATE_CLUSTER | CANCEL_AND_WAIT | CONTINUE .withActionOnFailure(step.get("action_on_failure", String.class, defaultActionOnFailure)); } } private static class ParameterCompiler { private final AWSKMSClient kms; private final TaskExecutionContext ctx; private final ConfigFactory cf; ParameterCompiler(AWSKMSClient kms, TaskExecutionContext ctx, ConfigFactory cf) { this.kms = Preconditions.checkNotNull(kms, "kms"); this.ctx = Preconditions.checkNotNull(ctx, "ctx"); this.cf = Preconditions.checkNotNull(cf, "cf"); } private List<Parameter> parameters(String flag, Config config, String name, BiFunction<String, String, String> f) { return parameters(config, name, f).values().stream() .flatMap(p -> Stream.of(Parameter.ofPlain(flag), p)) .collect(toList()); } private List<Parameter> parameters(Config config, String key) { return config.getListOrEmpty(key, JsonNode.class).stream() .map(node -> parameter(key, node, Function.identity())) .collect(toList()); } private Map<String, Parameter> 
parameters(Config config, String name, BiFunction<String, String, String> f) { return config.getKeys().stream() .collect(toMap( Function.identity(), key -> parameter(name, config, key, f))); } private Parameter parameter(String name, Config config, String key, BiFunction<String, String, String> f) { JsonNode node = config.get(key, JsonNode.class); return parameter(name, node, value -> f.apply(key, value)); } private Parameter parameter(String name, JsonNode value, Function<String, String> f) { if (value.isObject()) { String secretKey = cf.create(value).get("secret", String.class); String secretValue = ctx.secrets().getSecret(secretKey); return Parameter.ofKmsEncrypted(kmsEncrypt(f.apply(secretValue))); } else if (value.isArray()) { throw new ConfigException("Invalid '" + name + "' value: '" + value + "'"); } else { return Parameter.ofPlain(f.apply(value.asText())); } } private String kmsEncrypt(String value) { String kmsKeyId = ctx.secrets().getSecret("aws.emr.kms_key_id"); EncryptResult result = kms.encrypt(new EncryptRequest().withKeyId(kmsKeyId).withPlaintext(UTF_8.encode(value))); return base64(result.getCiphertextBlob()); } private String base64(ByteBuffer bb) { byte[] bytes = new byte[bb.remaining()]; bb.get(bytes); return Base64.getEncoder().encodeToString(bytes); } } private static FileReference fileReference(String key, Config config) { String reference = config.get(key, String.class); return fileReference(key, reference); } private static FileReference fileReference(String key, String reference) { if (reference.startsWith("s3:")) { // File on S3 AmazonS3URI uri; try { uri = new AmazonS3URI(reference); Preconditions.checkArgument(uri.getKey() != null & !uri.getKey().endsWith("/"), "must be a file"); } catch (IllegalArgumentException e) { throw new ConfigException("Invalid " + key + ": '" + reference + "'", e); } return ImmutableFileReference.builder() .reference(reference) .filename(Iterables.getLast(Splitter.on('/').split(reference), "")) .type(S3) 
.build(); } else { // Local file return ImmutableFileReference.builder() .reference(reference) .filename(Paths.get(reference).getFileName().toString()) .type(LOCAL) .build(); } } private static String randomTag() { byte[] bytes = new byte[8]; ThreadLocalRandom.current().nextBytes(bytes); return BaseEncoding.base32().omitPadding().encode(bytes); } private static String randomTag(Function<String, Boolean> seen) { while (true) { String tag = randomTag(); if (!seen.apply(tag)) { return tag; } } } private static RemoteFile prepareRunner(Filer filer, String tag) { URL commandRunnerResource = Resources.getResource(EmrOperatorFactory.class, "runner.py"); FileReference commandRunnerFileReference = ImmutableFileReference.builder() .reference(commandRunnerResource.toString()) .type(FileReference.Type.RESOURCE) .filename("runner.py") .build(); return filer.prepareRemoteFile(tag, "shared", "scripts", commandRunnerFileReference, false); } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) interface StagingFile { boolean template(); RemoteFile file(); static StagingFile of(boolean template, RemoteFile remoteFile) { return ImmutableStagingFile.builder() .template(template) .file(remoteFile) .build(); } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) interface SubmissionResult { boolean newCluster(); String clusterId(); List<String> stepIds(); static SubmissionResult ofNewCluster(String clusterId, List<String> stepIds) { return ImmutableSubmissionResult.builder() .newCluster(true) .clusterId(clusterId) .stepIds(stepIds) .build(); } static SubmissionResult ofExistingCluster(String clusterId, List<String> stepIds) { return ImmutableSubmissionResult.builder() .newCluster(false) .clusterId(clusterId) .stepIds(stepIds) .build(); } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) interface FileReference { static FileReference ofResource(String name) { return 
ofResource(EmrOperatorFactory.class, name); } static FileReference ofResource(Class<?> contextClass, String name) { URL url = Resources.getResource(contextClass, name); return ImmutableFileReference.builder() .reference(url.toString()) .type(Type.RESOURCE) .filename(name) .build(); } enum Type { LOCAL, RESOURCE, S3, DIRECT, } Type type(); default boolean local() { return type() != S3; } Optional<String> reference(); Optional<byte[]> contents(); String filename(); @Value.Check default void validate() { if (type() == DIRECT) { Preconditions.checkArgument(!reference().isPresent()); Preconditions.checkArgument(contents().isPresent()); } else { Preconditions.checkArgument(reference().isPresent()); Preconditions.checkArgument(!contents().isPresent()); } } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) interface RemoteFile { FileReference reference(); AmazonS3URI s3Uri(); String localPath(); } private interface Submitter { SubmissionResult submit(); } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) @JsonDeserialize(as = ImmutableConfigurationJson.class) interface ConfigurationJson { @JsonProperty("Classification") Optional<String> classification(); @JsonProperty("Configurations") List<ConfigurationJson> configurations(); @JsonProperty("Properties") Map<String, String> properties(); default Configuration toConfiguration() { return new Configuration() .withClassification(classification().orNull()) .withConfigurations(configurations().stream() .map(ConfigurationJson::toConfiguration) .collect(toList())) .withProperties(properties()); } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) @JsonDeserialize(as = ImmutableCommandRunnerConfiguration.class) @JsonSerialize(as = ImmutableCommandRunnerConfiguration.class) interface CommandRunnerConfiguration { List<DownloadConfig> download(); @JsonProperty("working_directory") String workingDirectory(); 
Map<String, Parameter> env(); List<Parameter> command(); static Builder builder() { return new Builder(); } class Builder extends ImmutableCommandRunnerConfiguration.Builder { Builder addDownload(RemoteFile remoteFile) { return addDownload(DownloadConfig.of(remoteFile)); } Builder addAllDownload(RemoteFile... remoteFiles) { return addAllDownload(asList(remoteFiles)); } Builder addAllDownload(Collection<RemoteFile> remoteFiles) { return addAllDownload(remoteFiles.stream() .map(DownloadConfig::of) .collect(toList())); } Builder addCommand(String command) { return addCommand(Parameter.ofPlain(command)); } Builder addAllCommand(String... command) { return addAllCommand(asList(command)); } Builder addAllCommand(Collection<String> command) { return addAllCommand(Parameter.ofPlain(command)); } } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) @JsonDeserialize(as = ImmutableParameter.class) @JsonSerialize(as = ImmutableParameter.class) interface Parameter { String type(); String value(); static Parameter ofPlain(String value) { return ImmutableParameter.builder().type("plain").value(value).build(); } static List<Parameter> ofPlain(String... values) { return ofPlain(asList(values)); } static List<Parameter> ofPlain(Collection<String> values) { return values.stream().map(Parameter::ofPlain).collect(toList()); } static Parameter ofKmsEncrypted(String value) { return ImmutableParameter.builder().type("kms_encrypted").value(value).build(); } static List<Parameter> ofKmsEncrypted(String... 
values) { return Stream.of(values).map(Parameter::ofKmsEncrypted).collect(toList()); } static List<Parameter> ofKmsEncrypted(Collection<String> values) { return values.stream().map(Parameter::ofKmsEncrypted).collect(toList()); } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) @JsonSerialize(as = ImmutableDownloadConfig.class) @JsonDeserialize(as = ImmutableDownloadConfig.class) interface DownloadConfig { String src(); String dst(); Optional<Integer> mode(); static DownloadConfig of(String src, String dst) { return ImmutableDownloadConfig.builder().src(src).dst(dst).build(); } static DownloadConfig of(String src, String dst, int mode) { return ImmutableDownloadConfig.builder().src(src).dst(dst).mode(mode).build(); } static DownloadConfig of(RemoteFile remoteFile) { return of(remoteFile.s3Uri().toString(), remoteFile.localPath()); } static DownloadConfig of(RemoteFile remoteFile, int mode) { return of(remoteFile.s3Uri().toString(), remoteFile.localPath(), mode); } } @Value.Immutable @Value.Style(visibility = Value.Style.ImplementationVisibility.PACKAGE) @JsonSerialize(as = ImmutableNewCluster.class) @JsonDeserialize(as = ImmutableNewCluster.class) interface NewCluster { String id(); int steps(); static NewCluster of(String id, int steps) { return ImmutableNewCluster.builder() .id(id) .steps(steps) .build(); } } }
package org.opens.tanaguru.service;

import java.util.List;
import java.util.Set;
import javax.xml.bind.annotation.XmlTransient;
import org.opens.tanaguru.contentadapter.ContentAdapterFactory;
import org.opens.tanaguru.contentadapter.ContentsAdapterFactory;
import org.opens.tanaguru.contentadapter.HTMLCleanerFactory;
import org.opens.tanaguru.contentadapter.HTMLParserFactory;
import org.opens.tanaguru.contentadapter.util.URLIdentifierFactory;
import org.opens.tanaguru.contentloader.DownloaderFactory;
import org.opens.tanaguru.entity.audit.Content;
import org.opens.tanaguru.entity.factory.audit.ContentFactory;
import org.opens.tanaguru.entity.service.audit.ContentDataService;

/**
 * Service contract for adapting a list of raw {@link Content} instances for a
 * given referential. Besides the single operation method
 * {@link #adaptContent(List, String)}, this interface exposes setters for the
 * collaborators (factories, services, configuration flags) the implementation
 * needs, so it can be wired up via dependency injection.
 *
 * @author jkowalczyk
 */
@XmlTransient
public interface ContentAdapterService {

    /**
     * Adapts the given contents for the given referential.
     *
     * @param contentList the raw contents to adapt
     * @param referential the referential the adaptation is performed for
     *                    (presumably an accessibility referential key — verify
     *                    against implementations)
     * @return the adapted contents
     */
    List<Content> adaptContent(List<Content> contentList, String referential);

    /**
     * @param contentFactory the content factory to set
     */
    void setContentFactory(ContentFactory contentFactory);

    /**
     * @param writeCleanHtmlInFile whether the cleaned HTML should also be
     *                             written to a file (in the temp folder —
     *                             see {@link #setTempFolderRootPath(String)})
     */
    void setWriteCleanHtmlInFile(boolean writeCleanHtmlInFile);

    /**
     * @param tempFolderRootPath the root path of the temporary working folder
     */
    void setTempFolderRootPath(String tempFolderRootPath);

    /**
     * @param contentsAdapterFactory the contents-adapter factory to set
     */
    void setContentsAdapterFactory(ContentsAdapterFactory contentsAdapterFactory);

    /**
     * @param htmlCleanerFactory the HTML-cleaner factory to set
     */
    void setHtmlCleanerFactory(HTMLCleanerFactory htmlCleanerFactory);

    /**
     * @param htmlParserFactory the HTML-parser factory to set
     */
    void setHtmlParserFactory(HTMLParserFactory htmlParserFactory);

    /**
     * @param contentAdapterFactorySet the set of content-adapter factories to
     *                                 use (one per adaptable content type)
     */
    void setContentAdapterFactorySet(Set<ContentAdapterFactory> contentAdapterFactorySet);

    /**
     * @param urlIdentifierFactory the URL-identifier factory to set
     */
    void setUrlIdentifierFactory(URLIdentifierFactory urlIdentifierFactory);

    /**
     * @param downloaderFactory the downloader factory to set
     */
    void setDownloaderFactory(DownloaderFactory downloaderFactory);

    /**
     * @param contentDataService the content persistence service to set
     */
    void setContentDataService(ContentDataService contentDataService);
}
package com.fuelpowered.lib.fuelsdk.unity;

import android.util.Log;
import com.fuelpowered.lib.fuelsdk.fuel;
import com.fuelpowered.lib.fuelsdk.fuelcompete;
import com.fuelpowered.lib.fuelsdk.fuelcompeteui;
import com.fuelpowered.lib.fuelsdk.fueldynamics;
import com.fuelpowered.lib.fuelsdk.fuelignite;
import com.fuelpowered.lib.fuelsdk.fueligniteui;
import com.fuelpowered.lib.fuelsdk.fuelimpl.fueljsonhelper;
import com.fuelpowered.lib.fuelsdk.fuelnotificationtype;
import com.fuelpowered.lib.fuelsdk.fuelorientationtype;
import com.unity3d.player.UnityPlayer;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Static bridge between the Unity runtime and the native Fuel SDK. Every
 * public method here is expected to be invoked by name from Unity scripts,
 * so signatures and names (including the non-conventional {@code Launch})
 * must not change. Data crossing the bridge is serialized as JSON strings;
 * the private {@code normalizeJSON*} helpers deserialize the type-tagged
 * encoding produced on the Unity side.
 */
public final class FuelSDKUnitySingleton {

    /**
     * Data type enumeration. Each constant is bound to the string tag used
     * by the Unity-side serializer for the corresponding primitive type.
     */
    private static enum DataType {
        INTEGER("0"),
        LONG("1"),
        FLOAT("2"),
        DOUBLE("3"),
        BOOLEAN("4"),
        STRING("5");

        /**
         * Mapping of values to their enumeration.
         */
        private static Map<String, DataType> mValueEnumMap;

        /**
         * Static initializer. Builds the reverse lookup table from tag value
         * to enumeration constant.
         */
        static {
            mValueEnumMap = new HashMap<String, DataType>();

            for (DataType dataType : DataType.values()) {
                mValueEnumMap.put(dataType.mValue, dataType);
            }
        }

        /**
         * Enumeration value.
         */
        private String mValue;

        /***********************************************************************
         * Constructor.
         *
         * @param value Value to bind to the enumeration.
         */
        private DataType(String value) {
            mValue = value;
        }

        /***********************************************************************
         * Retrieves the enumeration that matches the given value.
         *
         * @param value Value to retrieve the matching enumeration for.
         * @return The matching enumeration to the given value, null if there is
         *         no match.
         */
        public static DataType findByValue(String value) {
            return mValueEnumMap.get(value);
        }
    }

    // Tag used for all Android logcat output from this bridge.
    private static final String kLogTag = "FuelSDKUnitySingleton";

    //--Fuel methods

    /**
     * Initializes the core Fuel SDK and the dynamics subsystem. Sets the
     * default language code to "EN"; override via {@link #setLanguageCode}.
     */
    public static void initialize(String gameId, String gameSecret, boolean gameHasLogin, boolean gameHasInvite, boolean gameHasShare) {
        Log.i(kLogTag, "Initialize FuelSdkUnity");
        fuel.setup(gameId, gameSecret, gameHasLogin, gameHasInvite, gameHasShare);
        fuel.instance().setLanguageCode("EN");
        fueldynamics.setup();
        Log.i(kLogTag, "Finished Initialize");
    }

    /** Switches the SDK to the sandbox environment. */
    public static void useSandbox() {
        fuel.useSandbox();
    }

    /** Forwards the push-notification token to the SDK. */
    public static void setNotificationToken(String notificationToken) {
        fuel.instance().setNotificationToken(notificationToken);
    }

    /** Enables the given notification type; returns the SDK's result flag. */
    public static boolean enableNotification(fuelnotificationtype notificationType) {
        return fuel.instance().enableNotification(notificationType);
    }

    /** Disables the given notification type; returns the SDK's result flag. */
    public static boolean disableNotification(fuelnotificationtype notificationType) {
        return fuel.instance().disableNotification(notificationType);
    }

    /** Queries whether the given notification type is enabled. */
    public static boolean isNotificationEnabled(fuelnotificationtype notificationType) {
        return fuel.instance().isNotificationEnabled(notificationType);
    }

    /** Sets the SDK language code (e.g. overrides the "EN" default). */
    public static void setLanguageCode(String langCode) {
        fuel.instance().setLanguageCode(langCode);
    }

    /** Sets the notification icon by Android resource id. */
    public static boolean setNotificationIcon(int iconResId) {
        return fuel.instance().setNotificationIcon(iconResId);
    }

    /** Sets the notification icon by resource name. */
    public static boolean setNotificationIcon(String iconName) {
        return fuel.instance().setNotificationIcon(iconName);
    }

    /** Triggers a virtual-goods sync; returns the SDK's result flag. */
    public static boolean syncVirtualGoods() {
        return fuel.instance().syncVirtualGoods();
    }

    /** Acknowledges a virtual-goods transaction as consumed or not. */
    public static boolean acknowledgeVirtualGoods(final String transactionId, final boolean consumed) {
        return fuel.instance().acknowledgeVirtualGoods(transactionId, consumed);
    }

    /** Reports a completed social login back to the SDK. */
    public static boolean sdkSocialLoginCompleted(Map<String, Object> loginData) {
        return fuel.instance().sdkSocialLoginCompleted(loginData);
    }

    /** Reports a completed social invite back to the SDK. */
    public static boolean sdkSocialInviteCompleted() {
        return fuel.instance().sdkSocialInviteCompleted();
    }

    /** Reports a completed social share back to the SDK. */
    public static boolean sdkSocialShareCompleted() {
        return fuel.instance().sdkSocialShareCompleted();
    }

    //--Compete methods

    /** Initializes the compete subsystem. */
    public static void initializeCompete() {
        Log.i(kLogTag, "Initialize Compete");
        fuelcompete.setup();
    }

    /**
     * Parses the type-tagged JSON match result produced by Unity, normalizes
     * it to a plain Map, and submits it to the compete subsystem.
     *
     * @return false on parse failure or if the payload does not normalize to
     *         a Map; otherwise the SDK's submit result.
     */
    public static boolean submitMatchResult(String matchResultJSONString) {
        JSONObject matchResultJSON = null;

        try {
            matchResultJSON = new JSONObject(matchResultJSONString);
        } catch (JSONException jsonException) {
            return false;
        }

        Object matchResultObject = normalizeJSONObject(matchResultJSON);

        if (matchResultObject == null) {
            return false;
        }

        if (!(matchResultObject instanceof Map)) {
            return false;
        }

        @SuppressWarnings("unchecked")
        Map<String, Object> matchResult = (Map<String, Object>) matchResultObject;

        return fuelcompete.instance().submitMatchResult(matchResult);
    }

    /** Requests a refresh of challenge counts. */
    public static void syncChallengeCounts() {
        fuelcompete.instance().syncChallengeCounts();
    }

    /** Requests a refresh of tournament info. */
    public static void syncTournamentInfo() {
        fuelcompete.instance().syncTournamentInfo();
    }

    //--Compete UI methods

    /** Initializes the compete UI subsystem. */
    public static void initializeCompeteUI() {
        fuelcompeteui.setup();
    }

    /** Sets the orientation used by the compete UI. */
    public static void setOrientationuiCompete(fuelorientationtype orientation) {
        fuelcompeteui.instance().setOrientation(orientation);
    }

    /**
     * Launches the compete UI. NOTE(review): capitalized name breaks Java
     * convention but is presumably the exact name Unity invokes — do not
     * rename without checking the Unity-side binding.
     */
    public static boolean Launch() {
        Log.i(kLogTag, "Launch");
        return fuelcompeteui.instance().launch();
    }

    //--Ignite methods

    /**
     * Initializes the ignite subsystem and its UI, defaulting the UI to
     * portrait orientation.
     */
    public static void initializeIgnite() {
        Log.i(kLogTag, "Initialize fuelignite");
        fuelignite.setup();
        fueligniteui.setup();
        fueligniteui.instance().setOrientation(fuelorientationtype.portrait);
    }

    /**
     * Executes a named ignite method with a JSON-array-encoded parameter
     * list. Returns false (after logging) if the params fail to parse.
     */
    public static boolean execMethod(String method, String params) {
        try {
            JSONArray jsonParams = new JSONArray(params);
            List<Object> paramsList = fueljsonhelper.sharedInstance().toList(jsonParams, false);
            return fuelignite.instance().execMethod(method, paramsList);
        } catch (JSONException e) {
            Log.e(kLogTag, "ExecMethod error method : " + method + "; params:" + params + "; Exception" + e.toString());
            return false;
        }
    }

    /**
     * Sends a progress update (JSON-object-encoded) with optional tags
     * (JSON-array-encoded; may be null). Parse failures are logged and
     * swallowed — deliberately best-effort.
     */
    public static void sendProgress(String progress, String tags) {
        try {
            JSONObject jsonProgress = new JSONObject(progress);
            HashMap<String, Object> progressMap = (HashMap<String, Object>) fueljsonhelper.sharedInstance().toMap(jsonProgress);
            List<Object> tagsList = null;
            if (tags != null) {
                JSONArray jsonTags = new JSONArray(tags);
                if (jsonTags != null) {
                    tagsList = fueljsonhelper.sharedInstance().toList(jsonTags, false);
                }
            }
            fuelignite.instance().sendProgress(progressMap, tagsList);
        } catch (JSONException e) {
            Log.e(kLogTag, "sendProgress error: " + e.getMessage());
        }
    }

    /**
     * Requests events for the given tags (JSON-array-encoded; null means
     * no tag filter). Returns false if the tags fail to parse.
     */
    public static boolean getEvents(String eventTags) {
        try {
            List<Object> eventTagsList = null;
            if (eventTags != null) {
                JSONArray jsonEventTags = new JSONArray(eventTags);
                eventTagsList = fueljsonhelper.sharedInstance().toList(jsonEventTags, false);
            }
            return fuelignite.instance().getEvents(eventTagsList);
        } catch (JSONException e) {
            return false;
        }
    }

    /** Requests the leaderboard with the given id. */
    public static boolean getLeaderBoard(String boardID) {
        return fuelignite.instance().getLeaderBoard(boardID);
    }

    /** Requests the mission with the given id. */
    public static boolean getMission(String missionID) {
        return fuelignite.instance().getMission(missionID);
    }

    /** Requests the quest with the given id. */
    public static boolean getQuest(String questID) {
        return fuelignite.instance().getQuest(questID);
    }

    //--Ignite UI methods

    /** Initializes the ignite UI subsystem. */
    public static void initializeIgniteUI() {
        fueligniteui.setup();
    }

    /** Sets the orientation used by the ignite UI. */
    public static void setOrientationuiIgnite(fuelorientationtype orientation) {
        fueligniteui.instance().setOrientation(orientation);
    }

    //--Dynamics methods

    /** Initializes the dynamics subsystem. */
    public static void initializeDynamics() {
        fueldynamics.setup();
    }

    /**
     * Sets user conditions from a JSON-object-encoded string. Returns false
     * (after logging) if the payload fails to parse.
     */
    public static boolean setUserConditions(String userConditions) {
        try {
            JSONObject jsonProgress = new JSONObject(userConditions);
            HashMap<String, Object> userConditionsMap = (HashMap<String, Object>) fueljsonhelper.sharedInstance().toMap(jsonProgress);
            return fueldynamics.instance().setUserConditions(userConditionsMap);
        } catch (JSONException e) {
            Log.e(kLogTag, "setUserConditions error: " + e.getMessage());
            return false;
        }
    }

    /** Triggers a user-values sync; returns the SDK's result flag. */
    public static boolean syncUserValues() {
        return fueldynamics.instance().syncUserValues();
    }

    //--GCM

    /** Initializes GCM push messaging with the current Unity activity. */
    public static void initializeGCM(String googleProjectID) {
        fuel.instance().initializeGCM(UnityPlayer.currentActivity, googleProjectID);
    }

    /** Lifecycle hook from Unity; currently only logs. */
    public static void onPause() {
        Log.i(kLogTag, "onPause");
    }

    /** Lifecycle hook from Unity; currently only logs. */
    public static void onResume() {
        Log.i(kLogTag, "onResume");
    }

    /** Lifecycle hook from Unity; currently only logs. */
    public static void onQuit() {
        Log.i(kLogTag, "onQuit");
    }

    //--Utility methods

    /***************************************************************************
     * Normalizes a JSON object into its deserialized form. Used to deserialize
     * JSON which includes value meta-data in order to preserve it's type. Does
     * not suffer the type conversion issues between ints versus longs, and
     * floats versus doubles since the internal representation of primitives are
     * strings. Note that entries whose values are neither a JSONObject nor a
     * JSONArray are silently dropped — every leaf is expected to be a
     * type-tagged wrapper object.
     *
     * @param json JSON object to normalize.
     * @return The normalized JSON object, null otherwise.
     */
    private static Object normalizeJSONObject(JSONObject json) {
        if (json == null) {
            return null;
        }

        // A type-tagged wrapper object decodes directly to a primitive.
        if (isNormalizedJSONValue(json)) {
            return normalizeJSONValue(json);
        }

        Map<String, Object> resultMap = new HashMap<String, Object>();

        Iterator<?> iterator = json.keys();

        while (iterator.hasNext()) {
            String key = (String) iterator.next();

            if (key == null) {
                continue;
            }

            Object value = json.opt(key);

            if (value == null) {
                continue;
            }

            Object normalizedValue = null;

            if (value instanceof JSONArray) {
                normalizedValue = normalizeJSONArray((JSONArray) value);
            } else if (value instanceof JSONObject) {
                normalizedValue = normalizeJSONObject((JSONObject) value);
            } else {
                // Bare primitives are not expected; skip them.
                continue;
            }

            if (normalizedValue == null) {
                continue;
            }

            resultMap.put(key, normalizedValue);
        }

        return resultMap;
    }

    /***************************************************************************
     * Normalizes a JSON array into its deserialized form. Used to deserialize
     * JSON which includes value meta-data in order to preserve it's type. Does
     * not suffer the type conversion issues between ints versus longs, and
     * floats versus doubles since the internal representation of primitives are
     * strings. Elements that are neither a JSONObject nor a JSONArray are
     * silently dropped, mirroring {@code normalizeJSONObject}.
     *
     * @param json JSON array to normalize.
     * @return The normalized JSON array, null otherwise.
     */
    private static Object normalizeJSONArray(JSONArray json) {
        if (json == null) {
            return null;
        }

        List<Object> resultList = new ArrayList<Object>();

        int count = json.length();

        for (int index = 0; index < count; index++) {
            Object value = json.opt(index);

            if (value == null) {
                continue;
            }

            Object normalizedValue = null;

            if (value instanceof JSONArray) {
                normalizedValue = normalizeJSONArray((JSONArray) value);
            } else if (value instanceof JSONObject) {
                normalizedValue = normalizeJSONObject((JSONObject) value);
            } else {
                // Bare primitives are not expected; skip them.
                continue;
            }

            if (normalizedValue == null) {
                continue;
            }

            resultList.add(normalizedValue);
        }

        return resultList;
    }

    /***************************************************************************
     * Normalizes a JSON value into its deserialized form. Used to deserialize
     * JSON which includes value meta-data in order to preserve it's type. Does
     * not suffer the type conversion issues between ints versus longs, and
     * floats versus doubles since the internal representation of primitives are
     * strings. Expects a wrapper object of the form
     * {"type": tag, "value": string} where the tag matches a
     * {@link DataType} constant.
     *
     * @param json JSON value to normalize.
     * @return The normalized JSON value, null otherwise.
     */
    private static Object normalizeJSONValue(JSONObject json) {
        if (json == null) {
            return null;
        }

        String type = (String) json.opt("type");
        String value = (String) json.opt("value");

        if ((type == null) || (value == null)) {
            return null;
        }

        DataType dataType = DataType.findByValue(type);

        if (dataType == null) {
            return null;
        }

        // Malformed numeric strings normalize to null rather than throwing.
        switch (dataType) {
            case INTEGER:
                try {
                    return Integer.parseInt(value);
                } catch (NumberFormatException numberFormatException) {
                    return null;
                }
            case LONG:
                try {
                    return Long.parseLong(value);
                } catch (NumberFormatException numberFormatException) {
                    return null;
                }
            case FLOAT:
                try {
                    return Float.parseFloat(value);
                } catch (NumberFormatException numberFormatException) {
                    return null;
                }
            case DOUBLE:
                try {
                    return Double.parseDouble(value);
                } catch (NumberFormatException numberFormatException) {
                    return null;
                }
            case BOOLEAN:
                return Boolean.parseBoolean(value);
            case STRING:
                return value;
            default:
                return null;
        }
    }

    /***************************************************************************
     * Validates whether or not the given JSON object is a serialized JSON
     * value: it must carry the literal checksum "faddface", a "type" tag
     * matching a {@link DataType}, and a string "value".
     *
     * @param json JSON object to validate.
     * @return True if the given JSON object is a serialized JSON value, false
     *         otherwise.
     */
    private static boolean isNormalizedJSONValue(JSONObject json) {
        if (json == null) {
            return false;
        }

        Object checksumObject = json.opt("checksum");

        if (!(checksumObject instanceof String)) {
            return false;
        }

        String checksum = (String) checksumObject;

        if (!checksum.equals("faddface")) {
            return false;
        }

        Object typeObject = json.opt("type");

        if (!(typeObject instanceof String)) {
            return false;
        }

        String type = (String) json.opt("type");

        if (DataType.findByValue(type) == null) {
            return false;
        }

        Object valueObject = json.opt("value");

        if (!(valueObject instanceof String)) {
            return false;
        }

        return true;
    }
}
package uk.ac.ebi.quickgo.geneproduct.service; import uk.ac.ebi.quickgo.common.loader.DbXRefLoader; import uk.ac.ebi.quickgo.common.validator.DbXRefEntity; import uk.ac.ebi.quickgo.common.validator.DbXRefEntityValidation; import uk.ac.ebi.quickgo.geneproduct.common.GeneProductRepoConfig; import uk.ac.ebi.quickgo.geneproduct.common.GeneProductRepository; import uk.ac.ebi.quickgo.geneproduct.service.converter.GeneProductDocConverter; import uk.ac.ebi.quickgo.geneproduct.service.converter.GeneProductDocConverterImpl; import uk.ac.ebi.quickgo.rest.controller.ControllerValidationHelper; import uk.ac.ebi.quickgo.rest.controller.ControllerValidationHelperImpl; import uk.ac.ebi.quickgo.rest.search.QueryStringSanitizer; import uk.ac.ebi.quickgo.rest.search.SolrQueryStringSanitizer; import uk.ac.ebi.quickgo.rest.service.ServiceHelper; import uk.ac.ebi.quickgo.rest.service.ServiceHelperImpl; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import static uk.ac.ebi.quickgo.rest.controller.ControllerValidationHelperImpl.MAX_PAGE_RESULTS; @Configuration @ComponentScan({"uk.ac.ebi.quickgo.geneproduct.service"}) @Import({GeneProductRepoConfig.class}) public class ServiceConfig { Logger LOGGER = LoggerFactory.getLogger(ServiceConfig.class); private static final boolean DEFAULT_XREF_VALIDATION_IS_CASE_SENSITIVE = true; @Value("${geneproduct.db.xref.valid.casesensitive:"+DEFAULT_XREF_VALIDATION_IS_CASE_SENSITIVE+"}") boolean xrefValidationCaseSensitive; @Value("${geneproduct.db.xref.valid.regexes}") private String xrefValidationRegexFile; @Bean public GeneProductService goGeneProductService(GeneProductRepository geneProductRepository) { return new GeneProductServiceImpl( 
serviceHelper(), geneProductRepository, geneProductDocConverter()); } @Bean public ControllerValidationHelper geneProductValidator() { return new ControllerValidationHelperImpl(MAX_PAGE_RESULTS, idValidator()); } private ServiceHelper serviceHelper() { return new ServiceHelperImpl(queryStringSanitizer()); } private GeneProductDocConverter geneProductDocConverter() { return new GeneProductDocConverterImpl(); } private QueryStringSanitizer queryStringSanitizer() { return new SolrQueryStringSanitizer(); } private DbXRefEntityValidation idValidator() { final List<DbXRefEntity> validationList = geneProductLoader().load(); LOGGER.info("Here is the contents of the file used for gene product validation"); validationList.stream().forEach(v -> LOGGER.info(v.toString())); return DbXRefEntityValidation.createWithData(validationList); } private DbXRefLoader geneProductLoader() { return new DbXRefLoader(this.xrefValidationRegexFile, xrefValidationCaseSensitive); } }
package net.sf.taverna.t2.workbench.models.graph.dot;

import java.awt.Point;
import java.awt.Rectangle;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import net.sf.taverna.raven.log.Log;
import net.sf.taverna.t2.workbench.models.graph.Graph;
import net.sf.taverna.t2.workbench.models.graph.GraphController;
import net.sf.taverna.t2.workbench.models.graph.GraphEdge;
import net.sf.taverna.t2.workbench.models.graph.GraphElement;
import net.sf.taverna.t2.workbench.models.graph.GraphNode;
import net.sf.taverna.t2.workbench.models.graph.Graph.Alignment;

/**
 * Lays out a graph from a DOT layout.
 * <p>
 * Parses the dot output produced by a DOT layout run and, acting as a
 * {@link DOTParserVisitor}, copies positions/sizes from the parsed attribute
 * values ("bb", "pos", "width", "height", "rects", "lp") back onto the model
 * elements obtained from the {@link GraphController}. Coordinates are
 * translated by an offset (computed from the requested aspect ratio) and the
 * y-axis is flipped, since DOT uses a bottom-left origin while the model uses
 * a top-left origin.
 *
 * @author David Withers
 */
public class GraphLayout implements DOTParserVisitor {

	private static Log logger = Log.getLogger(GraphLayout.class);

	// Padding (in points) added around the laid-out graph bounds.
	private static final int BORDER = 10;

	// Overall graph bounding box; null until the top-level "bb" attribute has
	// been visited. Several coordinate helpers below behave differently
	// before/after it is set (see getRectangle/flipY).
	private Rectangle bounds;

	// private Rectangle adjustedBounds;

	// Desired width/height ratio used to pad the bounds in calculateBounds().
	private double aspectRatio;

	private GraphController graphController;

	// Translation applied to every parsed coordinate; computed in
	// calculateBounds() as a side effect of visiting the "bb" attribute.
	private int xOffset;

	private int yOffset;

	/**
	 * Parses the laid-out dot text and applies positions/sizes to the graph.
	 *
	 * @param graphController source of model elements, looked up by dot id
	 * @param graph the graph model to update
	 * @param laidOutDot dot output containing layout attributes
	 * @param aspectRatio required width/height ratio for the final bounds
	 * @return the resulting bounds (offset plus laid-out width/height)
	 * @throws ParseException if the dot text cannot be parsed
	 */
	public Rectangle layoutGraph(GraphController graphController, Graph graph,
			String laidOutDot, double aspectRatio) throws ParseException {
		this.graphController = graphController;
		this.aspectRatio = aspectRatio;
		// Reset state from any previous run; bounds/offsets are recomputed
		// when the "bb" attribute of the top-level graph is visited.
		bounds = null;
		xOffset = 0;
		yOffset = 0;
		// logger.error(laidOutDot);
		DOTParser parser = new DOTParser(new StringReader(laidOutDot));
		parser.parse().jjtAccept(this, graph);
		// int xOffset = (bounds.width - bounds.width) / 2;
		// int yOffset = (bounds.height - bounds.height) / 2;
		return new Rectangle(xOffset, yOffset, bounds.width, bounds.height);
	}

	// Default visitor behaviour: simply recurse into children, passing the
	// current data (usually the model element being populated) through.
	public Object visit(SimpleNode node, Object data) {
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTParse node, Object data) {
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTGraph node, Object data) {
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTStatementList node, Object data) {
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTStatement node, Object data) {
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTAttributeStatement node, Object data) {
		return node.childrenAccept(this, data);
	}

	// Node statement: resolve the model element by the (unquoted) node name so
	// subsequent attribute visits populate it; otherwise keep the current data.
	public Object visit(ASTNodeStatement node, Object data) {
		GraphElement element = graphController.getElement(removeQuotes(node.name));
		if (element != null) {
			return node.childrenAccept(this, element);
		}
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTNodeId node, Object data) {
		return node.childrenAccept(this, data);
	}

	public Object visit(ASTPort node, Object data) {
		return node.childrenAccept(this, data);
	}

	// Edge statement: rebuild the edge id as "source[:port]->target[:port]"
	// from the RHS chain, then look up the corresponding model element.
	public Object visit(ASTEdgeStatement node, Object data) {
		StringBuilder id = new StringBuilder();
		id.append(removeQuotes(node.name));
		if (node.port != null) {
			id.append(":");
			id.append(removeQuotes(node.port));
		}
		if (node.children != null) {
			for (Node child : node.children) {
				if (child instanceof ASTEdgeRHS) {
					NamedNode rhsNode = (NamedNode) child.jjtAccept(this, data);
					id.append("->");
					id.append(removeQuotes(rhsNode.name));
					if (rhsNode.port != null) {
						id.append(":");
						id.append(removeQuotes(rhsNode.port));
					}
				}
			}
		}
		GraphElement element = graphController.getElement(id.toString());
		if (element != null) {
			return node.childrenAccept(this, element);
		}
		return node.childrenAccept(this, data);
	}

	// Subgraph names carry a "cluster_" prefix in dot; strip it to find the
	// model element.
	public Object visit(ASTSubgraph node, Object data) {
		GraphElement element = graphController.getElement(removeQuotes(node.name).substring("cluster_".length()));
		if (element != null) {
			return node.childrenAccept(this, element);
		}
		return node.childrenAccept(this, data);
	}

	// Return the RHS node itself so visit(ASTEdgeStatement) can read its
	// name/port when assembling the edge id.
	public Object visit(ASTEdgeRHS node, Object data) {
		return node;
	}

	public Object visit(ASTAttributeList node, Object data) {
		return node.childrenAccept(this, data);
	}

	/**
	 * Applies a single attribute (name/value pair) to the current element.
	 * Which attributes are honoured depends on the element type (graph, node
	 * or edge).
	 */
	public Object visit(ASTAList node, Object data) {
		if (data instanceof Graph) {
			Graph graph = (Graph) data;
			if ("bb".equalsIgnoreCase(node.name)) {
				// "bb" = bounding box of the (sub)graph.
				Rectangle rect = getRectangle(node.value);
				// Guard against a degenerate zero-size layout.
				if (rect.width == 0 && rect.height == 0) {
					rect.width = 500;
					rect.height = 500;
				}
				// The first "bb" seen is the whole-graph bounds: derive the
				// padded/aspect-corrected bounds and the global offsets.
				if (bounds == null) {
					bounds = calculateBounds(rect);
					rect = bounds;
				}
				graph.setWidth(rect.width);
				graph.setHeight(rect.height);
				graph.setPosition(new Point(rect.x, rect.y));
			} else if ("lp".equalsIgnoreCase(node.name)) {
				// "lp" = label position.
				graph.setLabelPosition(getPoint(node.value));
			}
		} else if (data instanceof GraphNode) {
			GraphNode graphNode = (GraphNode) data;
			if ("width".equalsIgnoreCase(node.name)) {
				graphNode.setWidth(getSize(node.value));
			} else if ("height".equalsIgnoreCase(node.name)) {
				graphNode.setHeight(getSize(node.value));
			} else if ("pos".equalsIgnoreCase(node.name)) {
				// dot's "pos" is the node centre; convert to top-left corner.
				Point position = getPoint(node.value);
				position.x = position.x - (graphNode.getWidth() / 2);
				position.y = position.y - (graphNode.getHeight() / 2);
				graphNode.setPosition(position);
			} else if ("rects".equalsIgnoreCase(node.name)) {
				// "rects" of a record-shaped node: one rectangle per field.
				// The node's own rectangle comes first for horizontal
				// alignment, or after the sink-port rectangles for vertical.
				List<Rectangle> rectangles = getRectangles(node.value);
				List<GraphNode> sinkNodes = graphNode.getSinkNodes();
				if (graphController.getAlignment().equals(Alignment.HORIZONTAL)) {
					Rectangle rect = rectangles.remove(0);
					graphNode.setWidth(rect.width);
					graphNode.setHeight(rect.height);
					graphNode.setPosition(new Point(rect.x, rect.y));
				} else {
					Rectangle rect = rectangles.remove(sinkNodes.size());
					graphNode.setWidth(rect.width);
					graphNode.setHeight(rect.height);
					graphNode.setPosition(new Point(rect.x, rect.y));
				}
				// Port rectangles are stored relative to the node's origin.
				Point origin = graphNode.getPosition();
				for (GraphNode sinkNode : sinkNodes) {
					Rectangle rect = rectangles.remove(0);
					sinkNode.setWidth(rect.width);
					sinkNode.setHeight(rect.height);
					sinkNode.setPosition(new Point(rect.x - origin.x, rect.y - origin.y));
				}
				List<GraphNode> sourceNodes = graphNode.getSourceNodes();
				for (GraphNode sourceNode : sourceNodes) {
					Rectangle rect = rectangles.remove(0);
					sourceNode.setWidth(rect.width);
					sourceNode.setHeight(rect.height);
					sourceNode.setPosition(new Point(rect.x - origin.x, rect.y - origin.y));
				}
			}
		} else if (data instanceof GraphEdge) {
			GraphEdge graphEdge = (GraphEdge) data;
			if ("pos".equalsIgnoreCase(node.name)) {
				// Edge "pos" is the spline control-point list.
				graphEdge.setPath(getPath(node.value));
			}
		}
		return node.childrenAccept(this, data);
	}

	/**
	 * Pads the raw layout bounds by BORDER, then grows one dimension so the
	 * result matches the requested aspect ratio. Side effect: sets
	 * xOffset/yOffset so the original content is centred (plus half the
	 * border) inside the new bounds.
	 */
	private Rectangle calculateBounds(Rectangle bounds) {
		bounds = new Rectangle(bounds);
		bounds.width += BORDER;
		bounds.height += BORDER;
		Rectangle newBounds = new Rectangle(bounds);
		double ratio = ((float) bounds.width) / ((float) bounds.height);
		double requiredRatio = aspectRatio;
		if (ratio > requiredRatio) {
			newBounds.height = (int) ((ratio / requiredRatio) * bounds.height);
		} else if (ratio < requiredRatio) {
			newBounds.width = (int) ((requiredRatio / ratio) * bounds.width);
		}
		xOffset = (newBounds.width - bounds.width) / 2;
		yOffset = (newBounds.height - bounds.height) / 2;
		xOffset += BORDER/2;
		yOffset += BORDER/2;
		return newBounds;
	}

	// Parses a space-separated list of "x,y" pairs into offset-translated,
	// y-flipped points. Entries that are not simple pairs (e.g. dot's "e,x,y"
	// endpoint markers) are skipped.
	private List<Point> getPath(String value) {
		String[] points = removeQuotes(value).split(" ");
		List<Point> path = new ArrayList<Point>();
		for (String point : points) {
			String[] coords = point.split(",");
			if (coords.length == 2) {
				int x = (int) Float.parseFloat(coords[0]) + xOffset;
				int y = (int) Float.parseFloat(coords[1]) + yOffset;
				path.add(new Point(x, flipY(y)));
			}
		}
		return path;
	}

	// Converts a dot y coordinate (bottom-left origin) to the model's
	// top-left origin. Requires bounds to be set.
	private int flipY(int y) {
		return bounds.height - y;
	}

	private List<Rectangle> getRectangles(String value) {
		List<Rectangle> rectangles = new ArrayList<Rectangle>();
		String[] rects = value.split(" ");
		for (String rectangle : rects) {
			rectangles.add(getRectangle(rectangle));
		}
		return rectangles;
	}

	// Parses a dot "llx,lly,urx,ury" rectangle. x/width come from the left
	// and right edges; y is taken from the top edge and flipped (against the
	// graph bounds once known, otherwise against the rectangle's own height).
	private Rectangle getRectangle(String value) {
		String[] coords = removeQuotes(value).split(",");
		Rectangle rectangle = new Rectangle();
		rectangle.x = (int) Float.parseFloat(coords[0]);
		rectangle.y = (int) Float.parseFloat(coords[3]);
		rectangle.width = (int) Float.parseFloat(coords[2]) - rectangle.x;
		rectangle.height = rectangle.y - (int) Float.parseFloat(coords[1]);
		rectangle.x += xOffset;
		rectangle.y += yOffset;
		if (bounds != null) {
			rectangle.y = flipY(rectangle.y);
		} else {
			rectangle.y = rectangle.height - rectangle.y;
		}
		return rectangle;
	}

	// Parses an "x,y" pair into an offset-translated, y-flipped point.
	private Point getPoint(String value) {
		String[] coords = removeQuotes(value).split(",");
		return new Point((int) Float.parseFloat(coords[0]) + xOffset,
				flipY((int) Float.parseFloat(coords[1]) + yOffset));
	}

	// Converts a dot size in inches to points (72 points per inch).
	private int getSize(String value) {
		double size = Double.parseDouble(removeQuotes(value));
		return (int) (size * 72);
	}

	// Strips surrounding double quotes and removes backslash escapes.
	private String removeQuotes(String value) {
		String result = value.trim();
		if (result.startsWith("\"")) {
			result = result.substring(1);
		}
		if (result.endsWith("\"")) {
			result = result.substring(0, result.length() - 1);
		}
		result = result.replaceAll("\\\\", "");
		return result;
	}

}
package com.peterphi.std.guice.restclient.jaxb.webquery;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;

/**
 * Defines a group of constraints to be ANDed or ORred together
 */
@XmlRootElement(name = "ConstraintsGroup")
@XmlType(name = "ConstraintGroupType")
public class WQGroup extends WQConstraintLine
{
	@XmlAttribute(required = true)
	public WQGroupType operator;

	@XmlElementRefs({@XmlElementRef(name = "constraint", type = WQConstraint.class),
	                 @XmlElementRef(name = "constraints", type = WQGroup.class)})
	public List<WQConstraintLine> constraints = new ArrayList<>();


	public WQGroup()
	{
	}


	public WQGroup(final WQGroupType operator)
	{
		this.operator = operator;
	}


	public WQGroup(final WQGroupType operator, final List<WQConstraintLine> constraints)
	{
		this(operator);
		this.constraints.addAll(constraints);
	}


	@Override
	public String toString()
	{
		return "WQGroup{" + operator + ", constraints=" + constraints + "} ";
	}

	// Constraints


	/**
	 * Add a constraint (or nested group) to this group
	 *
	 * @param line the constraint line to add
	 *
	 * @return this group, for method chaining
	 */
	public WQGroup add(WQConstraintLine line)
	{
		constraints.add(line);
		return this;
	}


	/**
	 * Assert that a field equals one of the provided values. Implicitly creates a new OR group if multiple values are supplied
	 *
	 * @param field
	 * @param values
	 *
	 * @return this group, for method chaining
	 */
	public WQGroup eq(final String field, final Object... values)
	{
		if (values == null)
		{
			// A null varargs array is treated as a single null value
			add(WQConstraint.eq(field, null));
		}
		else if (values.length == 1)
		{
			add(WQConstraint.eq(field, values[0]));
		}
		else if (values.length > 1)
		{
			final WQGroup or = or();

			for (Object value : values)
				or.eq(field, value);
		}

		return this;
	}


	/**
	 * Assert that a field equals one of the provided values. Implicitly creates a new OR group if multiple values are supplied.
	 * At least one value must be supplied.
	 *
	 * @param field
	 * @param values at least one value; must not be null or empty
	 *
	 * @return this group, for method chaining
	 *
	 * @throws IllegalArgumentException if values is null or empty
	 */
	public WQGroup eq(final String field, final Collection<?> values)
	{
		// BUGFIX: the empty-collection branch previously called
		// values.stream().findFirst().get(), which throws NoSuchElementException
		// on an empty collection. Enforce the documented "at least one value"
		// contract explicitly, and handle the single-value case directly.
		if (values == null || values.isEmpty())
			throw new IllegalArgumentException("Must supply at least one value to .eq when passing a Collection");
		else if (values.size() == 1)
			return eq(field, values.iterator().next());
		else
		{
			final WQGroup or = or();

			for (Object value : values)
				or.eq(field, value);

			return this;
		}
	}


	public WQGroup neq(final String field, final Object value)
	{
		return add(WQConstraint.neq(field, value));
	}


	public WQGroup isNull(final String field)
	{
		return add(WQConstraint.isNull(field));
	}


	public WQGroup isNotNull(final String field)
	{
		return add(WQConstraint.isNotNull(field));
	}


	public WQGroup lt(final String field, final Object value)
	{
		return add(WQConstraint.lt(field, value));
	}


	public WQGroup le(final String field, final Object value)
	{
		return add(WQConstraint.le(field, value));
	}


	public WQGroup gt(final String field, final Object value)
	{
		return add(WQConstraint.gt(field, value));
	}


	public WQGroup ge(final String field, final Object value)
	{
		return add(WQConstraint.ge(field, value));
	}


	public WQGroup contains(final String field, final Object value)
	{
		return add(WQConstraint.contains(field, value));
	}


	public WQGroup startsWith(final String field, final Object value)
	{
		return add(WQConstraint.startsWith(field, value));
	}


	public WQGroup range(final String field, final Object from, final Object to)
	{
		return add(WQConstraint.range(field, from, to));
	}

	// Sub-groups


	/**
	 * Construct a new AND group and return it for method chaining
	 *
	 * @return the newly-created AND group
	 */
	public WQGroup and()
	{
		final WQGroup and = WQGroup.newAnd();

		add(and);

		return and;
	}


	/**
	 * Construct a new OR group and return it for method chaining
	 *
	 * @return the newly-created OR group
	 */
	public WQGroup or()
	{
		// BUGFIX: previously created an AND group (WQGroup.newAnd()) despite
		// this being the OR factory, so every implicit OR group built via
		// or() / eq(field, multiple values) actually ANDed its constraints.
		final WQGroup or = WQGroup.newOr();

		add(or);

		return or;
	}


	/**
	 * Construct a new AND group, using the supplier to add the constraints to the group. Returns the original {@link WQGroup}
	 * for method chaining
	 *
	 * @param consumer
	 *
	 * @return this group, for method chaining
	 */
	public WQGroup and(Consumer<WQGroup> consumer)
	{
		final WQGroup and = WQGroup.newAnd();

		add(and);

		// Let the consumer build their sub-constraints
		if (consumer != null)
			consumer.accept(and);

		return this;
	}


	/**
	 * Construct a new OR group, using the supplier to add the constraints to the group. Returns the original {@link WQGroup} for
	 * method chaining
	 *
	 * @param consumer
	 *
	 * @return this group, for method chaining
	 */
	public WQGroup or(Consumer<WQGroup> consumer)
	{
		final WQGroup or = WQGroup.newOr();

		add(or);

		// Let the consumer build their sub-constraints
		if (consumer != null)
			consumer.accept(or);

		return this;
	}

	// Helper constructors


	/**
	 * Construct a new empty AND group
	 *
	 * @return a new AND group
	 */
	public static WQGroup newAnd()
	{
		return new WQGroup(WQGroupType.AND);
	}


	/**
	 * Construct a new empty OR group
	 *
	 * @return a new OR group
	 */
	public static WQGroup newOr()
	{
		return new WQGroup(WQGroupType.OR);
	}


	@Override
	public String toQueryFragment()
	{
		// A single-constraint group needs no parentheses or operator
		if (constraints.size() == 1)
			return constraints.get(0).toQueryFragment();
		else
		{
			final String operatorStr = " " + operator.name() + " ";

			return constraints.stream()
			                  .map(WQConstraintLine::toQueryFragment)
			                  .collect(Collectors.joining(operatorStr, "(", ")"));
		}
	}
}
package com.servinglynk.hmis.warehouse.config;

import java.util.ArrayList;
import java.util.List;

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.env.Environment;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.http.converter.xml.MarshallingHttpMessageConverter;
import org.springframework.oxm.xstream.XStreamMarshaller;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;

import com.servinglynk.hmis.warehouse.base.service.core.PropertyReaderServiceImpl;
import com.servinglynk.hmis.warehouse.core.model.JSONObjectMapper;
import com.servinglynk.hmis.warehouse.rest.BulkUploadController;
import com.servinglynk.hmis.warehouse.rest.FileUploadController;

/**
 * Spring MVC/REST configuration: registers JSON (Jackson) and XML (XStream)
 * message converters, a pre-configured {@link RestTemplate}, multipart upload
 * support and the upload controllers.
 */
@Configuration
@Import({
        com.servinglynk.hmis.warehouse.base.dao.config.BaseDatabaseConfig.class,
        com.servinglynk.hmis.warehouse.base.service.config.BaseServiceConfig.class,
        com.servinglynk.hmis.warehouse.base.dao.config.HibernateConfig.class})
@EnableWebMvc
@EnableTransactionManagement
@EnableScheduling
public class RestConfig extends WebMvcConfigurerAdapter {

    /**
     * Registers the JSON and XML converters used for request/response bodies.
     * Annotated {@code @Override} (previously missing) so a signature change in
     * the parent class is caught at compile time instead of silently never
     * being invoked.
     */
    @Override
    public void configureMessageConverters(List<HttpMessageConverter<?>> messageConverters) {
        MappingJackson2HttpMessageConverter jmc = new MappingJackson2HttpMessageConverter();
        // Use the project's custom object mapper for JSON (de)serialization.
        jmc.setObjectMapper(new JSONObjectMapper());
        messageConverters.add(jmc);
        messageConverters.add(createXmlHttpMessageConverter());
        super.configureMessageConverters(messageConverters);
    }

    // Builds an XStream-backed XML converter used for both marshalling and
    // unmarshalling.
    private HttpMessageConverter<Object> createXmlHttpMessageConverter() {
        MarshallingHttpMessageConverter xmlConverter = new MarshallingHttpMessageConverter();
        XStreamMarshaller xstreamMarshaller = new XStreamMarshaller();
        xmlConverter.setMarshaller(xstreamMarshaller);
        xmlConverter.setUnmarshaller(xstreamMarshaller);
        return xmlConverter;
    }

    /**
     * RestTemplate configured with the same JSON/XML converters as the MVC
     * layer, for outbound service calls.
     */
    @Bean
    public RestTemplate restTemplate() {
        RestTemplate restTemplate = new RestTemplate();
        List<HttpMessageConverter<?>> messageConverters = new ArrayList<HttpMessageConverter<?>>();
        MappingJackson2HttpMessageConverter jmc = new MappingJackson2HttpMessageConverter();
        jmc.setObjectMapper(new JSONObjectMapper());
        messageConverters.add(jmc);
        messageConverters.add(createXmlHttpMessageConverter());
        restTemplate.setMessageConverters(messageConverters);
        return restTemplate;
    }

    // NOTE(review): injected but not referenced in this class — presumably
    // used by subclasses or kept for future property lookups; confirm before
    // removing.
    @Autowired
    Environment env;

    @Bean
    PropertyReaderServiceImpl propertyReaderService() {
        return new PropertyReaderServiceImpl();
    }

    /**
     * Multipart resolver for file uploads; max upload size ~403 MB.
     */
    @Bean(name = "multipartResolver")
    public CommonsMultipartResolver commonsMultipartResolver() {
        CommonsMultipartResolver commonsMultipartResolver = new CommonsMultipartResolver();
        commonsMultipartResolver.setDefaultEncoding("utf-8");
        commonsMultipartResolver.setMaxUploadSize(403006744);
        return commonsMultipartResolver;
    }

    @Bean
    public FileUploadController fileUploadController() {
        return new FileUploadController();
    }

    @Bean
    public BulkUploadController bulkUploadController() {
        return new BulkUploadController();
    }

    // Loads service properties once the context (and the property reader bean)
    // has been constructed.
    @PostConstruct
    public void initializeDatabasePropertySourceUsage() {
        propertyReaderService().loadProperties("HMIS_AUTHORIZATION_SERVICE");
    }
}
package com.fasterxml.jackson.jr.ob.impl;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.TreeMap;

import com.fasterxml.jackson.jr.ob.JSON;
import com.fasterxml.jackson.jr.ob.impl.POJODefinition.Prop;
import com.fasterxml.jackson.jr.ob.impl.POJODefinition.PropBuilder;

/**
 * Helper class that jackson-jr uses by default to introspect POJO properties
 * (represented as {@link POJODefinition}) to build general POJO readers
 * (deserializers) and writers (serializers).
 *<p>
 * Note that most of the usage is via {@link ValueReaderLocator} and
 * {@link ValueWriterLocator}
 *
 * @since 2.11
 */
public class BeanPropertyIntrospector
{
    protected final static Prop[] NO_PROPS = new Prop[0];

    private final static BeanPropertyIntrospector INSTANCE = new BeanPropertyIntrospector();

    public BeanPropertyIntrospector() { }

    public static BeanPropertyIntrospector instance() { return INSTANCE; }

    public POJODefinition pojoDefinitionForDeserialization(JSONReader r, Class<?> pojoType) {
        return _construct(pojoType, r.features());
    }

    public POJODefinition pojoDefinitionForSerialization(JSONWriter w, Class<?> pojoType) {
        return _construct(pojoType, w.features());
    }

    /**
     * Builds the full {@link POJODefinition}: collects properties from fields
     * and getter/setter methods (including inherited ones), and records the
     * default, String- and long-taking constructors if present.
     */
    private POJODefinition _construct(Class<?> beanType, int features)
    {
        Map<String,PropBuilder> propsByName = new TreeMap<String,PropBuilder>();
        _introspect(beanType, propsByName, features);

        Constructor<?> defaultCtor = null;
        Constructor<?> stringCtor = null;
        Constructor<?> longCtor = null;

        // Only no-arg, (String) and (long/Long) constructors are usable for
        // deserialization; everything else is ignored.
        for (Constructor<?> ctor : beanType.getDeclaredConstructors()) {
            Class<?>[] argTypes = ctor.getParameterTypes();
            if (argTypes.length == 0) {
                defaultCtor = ctor;
            } else if (argTypes.length == 1) {
                Class<?> argType = argTypes[0];
                if (argType == String.class) {
                    stringCtor = ctor;
                } else if (argType == Long.class || argType == Long.TYPE) {
                    longCtor = ctor;
                } else {
                    continue;
                }
            } else {
                continue;
            }
        }

        final int len = propsByName.size();
        Prop[] props;
        if (len == 0) {
            props = NO_PROPS;
        } else {
            props = new Prop[len];
            int i = 0;
            for (PropBuilder builder : propsByName.values()) {
                props[i++] = builder.build();
            }
        }
        return new POJODefinition(beanType, props, defaultCtor, stringCtor, longCtor);
    }

    /**
     * Recursively collects candidate properties from {@code currType} and its
     * superclasses (superclasses first, so subclass members win).
     */
    private static void _introspect(Class<?> currType, Map<String, PropBuilder> props,
            int features)
    {
        if (currType == null || currType == Object.class) {
            return;
        }
        // First, check base type
        _introspect(currType.getSuperclass(), props, features);

        final boolean noStatics = JSON.Feature.INCLUDE_STATIC_FIELDS.isDisabled(features);

        // then public fields (since 2.8); may or may not be ultimately included
        // but at this point still possible
        for (Field f : currType.getDeclaredFields()) {
            if (!Modifier.isPublic(f.getModifiers())
                    || f.isEnumConstant() || f.isSynthetic()) {
                continue;
            }
            // Only include static members if (a) inclusion feature enabled and
            // (b) not final (cannot deserialize final fields)
            if (Modifier.isStatic(f.getModifiers())
                    && (noStatics || Modifier.isFinal(f.getModifiers()))) {
                continue;
            }
            _propFrom(props, f.getName()).withField(f);
        }

        // then get methods from within this class
        for (Method m : currType.getDeclaredMethods()) {
            final int flags = m.getModifiers();
            // 13-Jun-2015, tatu: Skip synthetic, bridge methods altogether, for now
            //    at least (add more complex handling only if absolutely necessary)
            if (Modifier.isStatic(flags)
                    || m.isSynthetic() || m.isBridge()) {
                continue;
            }
            Class<?> argTypes[] = m.getParameterTypes();
            if (argTypes.length == 0) { // getter?
                // getters must be public to be used
                if (!Modifier.isPublic(flags)) {
                    continue;
                }

                Class<?> resultType = m.getReturnType();
                // BUGFIX: a void method's return type is Void.TYPE (void.class),
                // not Void.class, so the old `resultType == Void.class` check
                // never skipped void-returning getX() methods. Skip both the
                // primitive void and the Void wrapper: neither produces a value.
                if (resultType == Void.TYPE || resultType == Void.class) {
                    continue;
                }
                String name = m.getName();
                if (name.startsWith("get")) {
                    if (name.length() > 3) {
                        name = decap(name.substring(3));
                        _propFrom(props, name).withGetter(m);
                    }
                } else if (name.startsWith("is")) {
                    if (name.length() > 2) {
                        // May or may not be used, but collect for now all the same:
                        name = decap(name.substring(2));
                        _propFrom(props, name).withIsGetter(m);
                    }
                }
            } else if (argTypes.length == 1) { // setter?
                // Non-public setters are fine if we can force access, don't yet check
                // let's also not bother about return type; setters that return value are fine
                String name = m.getName();
                if (!name.startsWith("set") || name.length() == 3) {
                    continue;
                }
                name = decap(name.substring(3));
                _propFrom(props, name).withSetter(m);
            }
        }
    }

    // Fetch-or-create the builder for a property name.
    private static PropBuilder _propFrom(Map<String,PropBuilder> props, String name) {
        PropBuilder prop = props.get(name);
        if (prop == null) {
            prop = Prop.builder(name);
            props.put(name, prop);
        }
        return prop;
    }

    /**
     * Lower-cases the first character of a bean property name, except when the
     * name starts with multiple upper-case letters (e.g. "URL" stays "URL").
     */
    private static String decap(String name) {
        char c = name.charAt(0);
        char lowerC = Character.toLowerCase(c);

        if (c != lowerC) {
            // First: do NOT lower case if more than one leading upper case letters:
            if ((name.length() == 1)
                    || !Character.isUpperCase(name.charAt(1))) {
                char chars[] = name.toCharArray();
                chars[0] = lowerC;
                return new String(chars);
            }
        }
        return name;
    }
}
package jsprit.core.algorithm.recreate;

import jsprit.core.algorithm.recreate.RegretInsertion.DefaultScorer;
import jsprit.core.algorithm.recreate.RegretInsertion.ScoredJob;
import jsprit.core.algorithm.recreate.RegretInsertion.ScoringFunction;
import jsprit.core.problem.VehicleRoutingProblem;
import jsprit.core.problem.job.Job;
import jsprit.core.problem.solution.route.VehicleRoute;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.*;

/**
 * Insertion based on regret approach.
 *
 * <p>Basically calculates the insertion cost of the firstBest and the secondBest alternative. The score is then calculated as difference
 * between secondBest and firstBest, plus additional scoring variables that can defined in this.ScoringFunction.
 * The idea is that if the cost of the secondBest alternative is way higher than the first best, it seems to be important to insert this
 * customer immediatedly. If difference is not that high, it might not impact solution if this customer is inserted later.
 *
 * @author stefan schroeder
 *
 */
public class RegretInsertionConcurrent extends AbstractInsertionStrategy {

    private static Logger logger = LogManager.getLogger(RegretInsertionConcurrent.class);

    private ScoringFunction scoringFunction;

    private final JobInsertionCostsCalculator insertionCostsCalculator;

    private final ExecutorCompletionService<ScoredJob> completionService;

    /**
     * Sets the scoring function.
     *
     * <p>By default, the this.TimeWindowScorer is used.
     *
     * @param scoringFunction to score
     */
    public void setScoringFunction(ScoringFunction scoringFunction) {
        this.scoringFunction = scoringFunction;
    }

    public RegretInsertionConcurrent(JobInsertionCostsCalculator jobInsertionCalculator, VehicleRoutingProblem vehicleRoutingProblem, ExecutorService executorService) {
        super(vehicleRoutingProblem);
        this.scoringFunction = new DefaultScorer(vehicleRoutingProblem);
        this.insertionCostsCalculator = jobInsertionCalculator;
        this.vrp = vehicleRoutingProblem;
        completionService = new ExecutorCompletionService<ScoredJob>(executorService);
        // Parameterized logging defers the toString() call until debug is enabled
        // (was string concatenation).
        logger.debug("initialise {}", this);
    }

    @Override
    public String toString() {
        return "[name=regretInsertion][additionalScorer="+scoringFunction+"]";
    }

    /**
     * Runs insertion.
     *
     * <p>Before inserting a job, all unassigned jobs are scored according to its best- and secondBest-insertion plus additional scoring variables.
     *
     * @throws java.lang.RuntimeException if smth went wrong with thread execution
     *
     */
    @Override
    public Collection<Job> insertUnassignedJobs(Collection<VehicleRoute> routes, Collection<Job> unassignedJobs) {
        List<Job> badJobs = new ArrayList<Job>(unassignedJobs.size());
        List<Job> jobs = new ArrayList<Job>(unassignedJobs);

        // Greedily insert the highest-regret job, re-scoring the remainder each
        // round, until every job is either inserted or marked bad.
        while (!jobs.isEmpty()) {
            List<Job> unassignedJobList = new ArrayList<Job>(jobs);
            List<Job> badJobList = new ArrayList<Job>();
            ScoredJob bestScoredJob = nextJob(routes, unassignedJobList, badJobList);
            if(bestScoredJob != null){
                if(bestScoredJob.isNewRoute()){
                    routes.add(bestScoredJob.getRoute());
                }
                insertJob(bestScoredJob.getJob(),bestScoredJob.getInsertionData(),bestScoredJob.getRoute());
                jobs.remove(bestScoredJob.getJob());
            }
            for(Job j : badJobList) {
                jobs.remove(j);
                badJobs.add(j);
            }
        }
        return badJobs;
    }

    /**
     * Scores all unassigned jobs concurrently and returns the best-scored one;
     * jobs that cannot be inserted at all are appended to {@code badJobList}.
     */
    private ScoredJob nextJob(final Collection<VehicleRoute> routes, List<Job> unassignedJobList, List<Job> badJobList) {
        ScoredJob bestScoredJob = null;
        for (final Job unassignedJob : unassignedJobList) {
            completionService.submit(new Callable<ScoredJob>() {
                @Override
                public ScoredJob call() throws Exception {
                    return RegretInsertion.getScoredJob(routes, unassignedJob, insertionCostsCalculator, scoringFunction);
                }
            });
        }
        try{
            for(int i=0; i < unassignedJobList.size(); i++){
                Future<ScoredJob> fsj = completionService.take();
                ScoredJob sJob = fsj.get();
                if(sJob instanceof RegretInsertion.BadJob){
                    badJobList.add(sJob.getJob());
                    continue;
                }
                if(bestScoredJob == null){
                    bestScoredJob = sJob;
                }
                else if(sJob.getScore() > bestScoredJob.getScore()){
                    bestScoredJob = sJob;
                }
            }
        }
        catch(InterruptedException e){
            // Restore the interrupt flag; the partially-scored best job (possibly
            // null) is returned and the caller's loop terminates naturally.
            Thread.currentThread().interrupt();
        }
        catch (ExecutionException e) {
            // Log with the full cause and rethrow; the redundant
            // e.printStackTrace() was removed (logger.error already records the
            // stack trace).
            logger.error("Exception", e);
            throw new RuntimeException(e);
        }
        return bestScoredJob;
    }

}
package org.junit.gen5.engine.junit5;

import static org.junit.gen5.api.Assertions.assertTrue;
import static org.junit.gen5.api.Assertions.fail;
import static org.junit.gen5.api.Assumptions.assumeTrue;
import static org.junit.gen5.engine.TestPlanSpecification.build;
import static org.junit.gen5.engine.TestPlanSpecification.forClass;
import static org.junit.gen5.engine.TestPlanSpecification.forUniqueId;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

import org.junit.Assert;
import org.junit.gen5.api.AfterEach;
import org.junit.gen5.api.BeforeEach;
import org.junit.gen5.api.Test;
import org.junit.gen5.engine.TestPlanSpecification;

/**
 * Core integration tests for the {@link JUnit5TestEngine}.
 *
 * <p>The outer test methods are JUnit 4 tests ({@code @org.junit.Test}) that
 * drive the JUnit 5 engine against the nested {@code LocalTestCase} fixture
 * and assert the exact started/succeeded/skipped/aborted/failed counts.
 *
 * @author Sam Brannen
 * @since 5.0
 */
public class CoreJUnit5TestEngineTests extends AbstractJUnit5TestEngineTestCase {

	// Runs a composite spec (a unique-id selector PLUS the whole class); the
	// overlapping selectors must not cause duplicate execution.
	@org.junit.Test
	public void executeCompositeTestPlanSpecification() {
		TestPlanSpecification spec = build(
			forUniqueId("junit5:org.junit.gen5.engine.junit5.CoreJUnit5TestEngineTests$LocalTestCase#alwaysPasses()"),
			forClass(LocalTestCase.class));
		TrackingTestExecutionListener listener = executeTests(spec, 8);
		Assert.assertEquals("# tests started", 7, listener.testStartedCount.get());
		Assert.assertEquals("# tests succeeded", 4, listener.testSucceededCount.get());
		Assert.assertEquals("# tests skipped", 0, listener.testSkippedCount.get());
		Assert.assertEquals("# tests aborted", 1, listener.testAbortedCount.get());
		Assert.assertEquals("# tests failed", 2, listener.testFailedCount.get());
	}

	// Runs all tests in LocalTestCase and additionally verifies that the
	// @AfterEach callback ran once per started test.
	@org.junit.Test
	public void executeTestsForClass() {
		LocalTestCase.countAfterInvoked = 0;
		TrackingTestExecutionListener listener = executeTestsForClass(LocalTestCase.class, 8);
		Assert.assertEquals("# tests started", 7, listener.testStartedCount.get());
		Assert.assertEquals("# tests succeeded", 4, listener.testSucceededCount.get());
		Assert.assertEquals("# tests skipped", 0, listener.testSkippedCount.get());
		Assert.assertEquals("# tests aborted", 1, listener.testAbortedCount.get());
		Assert.assertEquals("# tests failed", 2, listener.testFailedCount.get());
		Assert.assertEquals("# after calls", 7, LocalTestCase.countAfterInvoked);
	}

	// Selects exactly one test method by its unique id.
	@org.junit.Test
	public void executeTestForUniqueId() {
		TestPlanSpecification spec = build(
			forUniqueId("junit5:org.junit.gen5.engine.junit5.CoreJUnit5TestEngineTests$LocalTestCase#alwaysPasses()"));
		TrackingTestExecutionListener listener = executeTests(spec, 2);
		Assert.assertEquals("# tests started", 1, listener.testStartedCount.get());
		Assert.assertEquals("# tests succeeded", 1, listener.testSucceededCount.get());
		Assert.assertEquals("# tests skipped", 0, listener.testSkippedCount.get());
		Assert.assertEquals("# tests aborted", 0, listener.testAbortedCount.get());
		Assert.assertEquals("# tests failed", 0, listener.testFailedCount.get());
	}

	// An exception thrown from @AfterEach must mark the test as failed, not
	// succeeded.
	@org.junit.Test
	public void executeTestForUniqueIdWithExceptionThrownInAfterMethod() {
		TestPlanSpecification spec = build(forUniqueId(
			"junit5:org.junit.gen5.engine.junit5.CoreJUnit5TestEngineTests$LocalTestCase#throwExceptionInAfterMethod()"));
		TrackingTestExecutionListener listener = executeTests(spec, 2);
		Assert.assertEquals("# tests started", 1, listener.testStartedCount.get());
		Assert.assertEquals("# tests succeeded", 0, listener.testSucceededCount.get());
		Assert.assertEquals("# tests skipped", 0, listener.testSkippedCount.get());
		Assert.assertEquals("# tests aborted", 0, listener.testAbortedCount.get());
		Assert.assertEquals("# tests failed", 1, listener.testFailedCount.get());
	}

	// Verifies that @Test methods inherited from a superclass are executed.
	private static abstract class AbstractTestCase {

		@Test
		void fromSuperclass() {
			/* no-op */
		}
	}

	/**
	 * Fixture class executed by the engine. Deliberately exercises: static and
	 * instance @BeforeEach methods, an @AfterEach that can be made to throw, a
	 * meta-annotated test method, an aborted (assumption-failed) test and a
	 * failing test.
	 */
	private static class LocalTestCase extends AbstractTestCase {

		static boolean staticBeforeInvoked = false;

		boolean beforeInvoked = false;

		boolean throwExceptionInAfterMethod = false;

		static int countAfterInvoked = 0;

		// Intentionally static: checks that static @BeforeEach methods are
		// supported.
		@BeforeEach
		static void staticBefore() {
			staticBeforeInvoked = true;
		}

		@BeforeEach
		void before() {
			this.beforeInvoked = true;
			// Reset state, since the test instance is retained across all test methods;
			// otherwise, after() always throws an exception.
			this.throwExceptionInAfterMethod = false;
		}

		@AfterEach
		void after() {
			countAfterInvoked++;
			if (this.throwExceptionInAfterMethod) {
				throw new RuntimeException("Exception thrown from @AfterEach method");
			}
		}

		@Test
		void methodLevelCallbacks() {
			assertTrue(this.beforeInvoked, "@BeforeEach was not invoked on instance method");
			assertTrue(staticBeforeInvoked, "@BeforeEach was not invoked on static method");
		}

		// Arms the @AfterEach method to throw; the test body itself passes.
		@Test
		void throwExceptionInAfterMethod() {
			this.throwExceptionInAfterMethod = true;
		}

		@Test
		void alwaysPasses() {
			/* no-op */
		}

		// Uses the meta-annotated custom annotation below instead of @Test.
		@CustomTestAnnotation
		void customTestAnnotation() {
			/* no-op */
		}

		// Failed assumption: counted as aborted, not failed.
		@Test
		void aborted() {
			assumeTrue(false);
		}

		@Test
		void alwaysFails() {
			fail("#fail");
		}
	}

	// @Test used as a meta-annotation: methods annotated with
	// @CustomTestAnnotation must be discovered as tests.
	@Test
	@Retention(RetentionPolicy.RUNTIME)
	@interface CustomTestAnnotation {
	}

}
package org.languagetool.tokenizers;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Tokenizes text into sentences by looking for typical end-of-sentence markers
 * (periods, question marks, exclamation marks), but considering exceptions
 * such as abbreviations, dates, quoted punctuation, and ellipses.
 *
 * <p>The implementation works in three passes over the text (see {@link #tokenize}):
 * first a sentence-break marker ({@link #EOS}) is inserted at every candidate
 * boundary, then false positives are removed, then a few extra boundaries are
 * re-inserted for special cases. Finally the text is split on the marker.
 *
 * @author Daniel Naber
 * @deprecated use {@link org.languagetool.tokenizers.SRXSentenceTokenizer} instead (deprecated since LT 1.8)
 */
@SuppressWarnings("AssignmentToMethodParameter")
public class RegexSentenceTokenizer implements SentenceTokenizer {

  // End-of-sentence marker: a NUL character, assumed not to occur in real input.
  protected static final String EOS = "\0";
  //private final static String EOS = "#"; // for testing only
  // Sentence-ending punctuation.
  protected static final String P = "[\\.!?]"; // PUNCTUATION
  // Optional characters allowed directly AFTER the sentence-ending punctuation
  // (closing quotes/brackets).
  // NOTE(review): the empty alternative "||" makes one branch match the empty
  // string; a closing guillemet (e.g. "»") may have been lost here — verify
  // against the upstream LanguageTool source before changing.
  protected static final String AP = "(?:'|«|\"||\\)|\\]|\\})?"; // AFTER PUNCTUATION
  // Punctuation optionally followed by a closing quote/bracket.
  protected static final String PAP = P + AP;
  protected static final String PARENS = "[\\(\\)\\[\\]]"; // parentheses

  // Check out the private methods for comments and examples about these
  // regular expressions:

  // Active paragraph pattern; selected by setSingleLineBreaksMarksParagraph().
  private Pattern paragraph;
  // Paragraph = two (or more, via \s*) consecutive line breaks.
  private static final Pattern paragraphByTwoLineBreaks = Pattern.compile("([\\n\\r]\\s*[\\n\\r])");
  // Paragraph = any single line break.
  private static final Pattern paragraphByLineBreak = Pattern.compile("([\\n\\r])");
  // add unbreakable field, for example footnote, if it's at the end of the sentence
  // (\u0002 is an internal placeholder character that may follow the punctuation).
  private static final Pattern punctWhitespace = Pattern.compile("(" + PAP + "(\u0002)?\\s)");
  // \p{Lu} = uppercase, with obeying Unicode (\p{Upper} is just US-ASCII!):
  private static final Pattern punctUpperLower = Pattern.compile("(" + PAP + ")([\\p{Lu}][^\\p{Lu}.])");
  // Whitespace + single word character + sentence punctuation.
  private static final Pattern letterPunct = Pattern.compile("(\\s[\\wüöäÜÖÄß]" + P + ")");
  // Single letter (not part of a longer word) + punctuation + space, e.g. "U. S. A.":
  private static final Pattern abbrev1 = Pattern.compile("([^-\\wüöäÜÖÄß][\\wüöäÜÖÄß]" + PAP + "\\s)" + EOS);
  // Single letter + punctuation without trailing space, e.g. "U.S.A.":
  private static final Pattern abbrev2 = Pattern.compile("([^-\\wüöäÜÖÄß][\\wüöäÜÖÄß]" + P + ")" + EOS);
  // Whitespace-delimited single letter + dot, e.g. " p. ":
  private static final Pattern abbrev3 = Pattern.compile("(\\s[\\wüöäÜÖÄß]\\.\\s+)" + EOS);
  // Ellipsis followed by a lowercase letter:
  private static final Pattern abbrev4 = Pattern.compile("(\\.\\.\\. )" + EOS + "([\\p{Ll}])");
  // Punctuation enclosed in quotes:
  private static final Pattern abbrev5 = Pattern.compile("(['\"]" + P + "['\"]\\s+)" + EOS);
  // Quote followed by a lowercase continuation:
  private static final Pattern abbrev6 = Pattern.compile("([\"']\\s*)" + EOS + "(\\s*[\\p{Ll}])");
  // Punctuation surrounded by whitespace on both sides:
  private static final Pattern abbrev7 = Pattern.compile("(\\s" + PAP + "\\s)" + EOS);
  // e.g. 3.10. (in a date):
  private static final Pattern abbrev8 = Pattern.compile("(\\d{1,2}\\.\\d{1,2}\\.\\s+)" + EOS);
  // Apostrophe + letter + punctuation + whitespace, e.g. "won't. ":
  private static final Pattern repair1 = Pattern.compile("('[\\wüöäÜÖÄß]" + P + ")(\\s)");
  // " no." not followed by a digit (i.e. not "no. 5"):
  private static final Pattern repair2 = Pattern.compile("(\\sno\\.)(\\s+)(?!\\d)");
  // "a.m."/"p.m." followed by an uppercase letter:
  private static final Pattern repair3 = Pattern.compile("([ap]\\.m\\.\\s+)([\\p{Lu}])");
  // Bracketed exclamation/question marks, e.g. "(!)":
  private static final Pattern repair10 = Pattern.compile("([\\(\\[])([!?]+)([\\]\\)]) " + EOS);
  // Exclamation/question marks before a closing bracket, e.g. "genau!)":
  private static final Pattern repair11 = Pattern.compile("([!?]+)([\\)\\]]) " + EOS);
  // A lone parenthesis/bracket before a break marker:
  private static final Pattern repair12 = Pattern.compile("(" + PARENS + ") " + EOS);

  // some abbreviations:
  private static final String[] ABBREV_LIST = {
      // English -- but these work globally for all languages:
      "Mr", "Mrs", "No", "pp", "St", "no",
      "Sr", "Jr", "Bros", "etc", "vs", "esp", "Fig", "fig", "Jan", "Feb", "Mar", "Apr",
      "Jun", "Jul", "Aug", "Sep", "Sept", "Oct", "Okt", "Nov", "Dec", "Ph.D", "PhD",
      "al",  // in "et al."
      "cf", "Inc", "Ms", "Gen", "Sen", "Prof", "Corp", "Co" };

  // One compiled pattern per abbreviation (built-in + user-supplied); see constructor.
  private final Set<Pattern> abbreviationPatterns = new HashSet<>();

  /**
   * Month names like "Dezember" that should not be considered a sentence
   * boundary in string like "13. Dezember". May also contain other
   * words that indicate there's no sentence boundary when preceded
   * by a number and a dot.
   */
  protected String[] monthNames;

  /**
   * Create a sentence tokenizer that uses the built-in abbreviations.
   */
  public RegexSentenceTokenizer() {
    this(new String[]{});
  }

  /**
   * Create a sentence tokenizer with the given list of abbreviations,
   * additionally to the built-in ones.
   *
   * @param abbrevList extra abbreviations (without trailing dot) that must
   *                   never be treated as a sentence end
   */
  public RegexSentenceTokenizer(final String[] abbrevList) {
    final List<String> allAbbreviations = new ArrayList<>();
    allAbbreviations.addAll(Arrays.asList(abbrevList));
    allAbbreviations.addAll(Arrays.asList(ABBREV_LIST));
    // Pre-compile one "undo the break after this abbreviation" pattern per entry.
    for (String element : allAbbreviations) {
      final Pattern pattern = Pattern.compile("(\\b" + element + PAP + "\\s)" + EOS);
      abbreviationPatterns.add(pattern);
    }
    // Default: only two or more consecutive line breaks end a paragraph.
    setSingleLineBreaksMarksParagraph(false);
  }

  /**
   * @param lineBreakParagraphs if <code>true</code>, single lines breaks are assumed to end a paragraph,
   *  with <code>false</code>, only two ore more consecutive line breaks end a paragraph
   */
  @Override
  public void setSingleLineBreaksMarksParagraph(final boolean lineBreakParagraphs) {
    if (lineBreakParagraphs) {
      paragraph = paragraphByLineBreak;
    } else {
      paragraph = paragraphByTwoLineBreaks;
    }
  }

  @Override
  public boolean singleLineBreaksMarksPara() {
    // True iff the single-line-break pattern is currently active.
    return paragraph == paragraphByLineBreak;
  }

  /**
   * Tokenize the given string to sentences.
   *
   * @param s the text to split; must not contain the internal {@link #EOS} marker
   * @return the sentences, in order; delimiters are kept inside the sentences
   */
  @Override
  public List<String> tokenize(String s) {
    // Pass 1: mark every candidate boundary.
    s = firstSentenceSplitting(s);
    // Pass 2: remove markers that are false positives (abbreviations etc.).
    s = removeFalseEndOfSentence(s);
    // Pass 3: re-add markers for special cases that pass 2 removed too eagerly.
    s = splitUnsplitStuff(s);
    final StringTokenizer stringTokenizer =
        new StringTokenizer(s, EOS);
    final List<String> l = new ArrayList<>();
    while (stringTokenizer.hasMoreTokens()) {
      final String sentence = stringTokenizer.nextToken();
      l.add(sentence);
    }
    return l;
  }

  /**
   * Add a special break character at all places with typical sentence delimiters.
   */
  private String firstSentenceSplitting(String s) {
    // Double new-line means a new sentence:
    s = paragraph.matcher(s).replaceAll("$1" + EOS);
    // Punctuation followed by whitespace means a new sentence:
    s = punctWhitespace.matcher(s).replaceAll("$1" + EOS);
    // New (compared to the perl module): Punctuation followed by uppercase followed
    // by non-uppercase character (except dot) means a new sentence:
    s = punctUpperLower.matcher(s).replaceAll("$1" + EOS + "$2");
    // Break also when single letter comes before punctuation:
    s = letterPunct.matcher(s).replaceAll("$1" + EOS);
    return s;
  }

  /**
   * Repair some positions that don't require a split, i.e. remove the special break character at
   * those positions.
   */
  protected String removeFalseEndOfSentence(String s) {
    // Don't split at e.g. "U. S. A.":
    s = abbrev1.matcher(s).replaceAll("$1");
    // Don't split at e.g. "U.S.A.":
    s = abbrev2.matcher(s).replaceAll("$1");
    // Don't split after a white-space followed by a single letter followed
    // by a dot followed by another whitespace.
    // e.g. " p. "
    s = abbrev3.matcher(s).replaceAll("$1");
    // Don't split at "bla bla... yada yada" (TODO: use \.\.\.\s+ instead?)
    s = abbrev4.matcher(s).replaceAll("$1$2");
    // Don't split [.?!] when they're quoted:
    s = abbrev5.matcher(s).replaceAll("$1");
    // Don't split at abbreviations:
    for (final Pattern abbrevPattern : abbreviationPatterns) {
      final Matcher matcher = abbrevPattern.matcher(s);
      s = matcher.replaceAll("$1");
    }
    // Don't break after quote unless there's a capital letter:
    // e.g.: "That's right!" he said.
    s = abbrev6.matcher(s).replaceAll("$1$2");
    // e.g. "Das ist . so." -> assume one sentence
    s = abbrev7.matcher(s).replaceAll("$1");
    // e.g. "Das ist . so." -> assume one sentence
    s = abbrev8.matcher(s).replaceAll("$1");
    // extension by dnaber --commented out, doesn't help:
    // text = re.compile("(:\s+)%s(\s*[%s])" % (self.EOS, string.lowercase),
    //   re.DOTALL).sub("\\1\\2", text)
    // "13. Dezember" etc. -> no sentence boundary:
    if (monthNames != null) {
      for (String element : monthNames) {
        s = s.replaceAll("(\\d+\\.) " + EOS + "(" + element + ")", "$1 $2");
      }
    }
    // e.g. (German) "Das hier ist ein(!) Satz."
    s = repair10.matcher(s).replaceAll("$1$2$3 ");
    // e.g. (German) "Das hier ist (genau!) ein Satz."
    s = repair11.matcher(s).replaceAll("$1$2 ");
    // e.g. "bla (...) blubb" -> no sentence boundary
    s = repair12.matcher(s).replaceAll("$1 ");
    return s;
  }

  /**
   * Treat some more special cases that make up a sentence boundary. Insert the special break
   * character at these positions.
   */
  private String splitUnsplitStuff(String s) {
    // Split e.g.: He won't. #Really.
    s = repair1.matcher(s).replaceAll("$1" + EOS + "$2");
    // Split e.g.: He won't say no. Not really.
    s = repair2.matcher(s).replaceAll("$1" + EOS + "$2");
    // Split at "a.m." or "p.m." followed by a capital letter.
    s = repair3.matcher(s).replaceAll("$1" + EOS + "$2");
    return s;
  }

}
package org.grouplens.lenskit.core;

import org.grouplens.grapht.graph.Edge;
import org.grouplens.grapht.graph.Graph;
import org.grouplens.grapht.graph.Node;
import org.grouplens.grapht.spi.CachedSatisfaction;
import org.grouplens.grapht.spi.Satisfaction;
import org.grouplens.grapht.spi.reflect.InstanceSatisfaction;
import org.grouplens.lenskit.ItemRecommender;
import org.grouplens.lenskit.RatingPredictor;
import org.grouplens.lenskit.RecommenderBuildException;
import org.grouplens.lenskit.baseline.BaselinePredictor;
import org.grouplens.lenskit.baseline.BaselineRatingPredictor;
import org.grouplens.lenskit.baseline.ConstantPredictor;
import org.grouplens.lenskit.baseline.GlobalMeanPredictor;
import org.grouplens.lenskit.data.Event;
import org.grouplens.lenskit.data.dao.DAOFactory;
import org.grouplens.lenskit.data.dao.EventCollectionDAO;
import org.grouplens.lenskit.params.ThresholdValue;
import org.grouplens.lenskit.iterative.ThresholdStoppingCondition;
import org.junit.Before;
import org.junit.Test;

import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.util.Collections;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;

/**
 * Tests for {@link LenskitRecommenderEngine} and its factory: basic recommender
 * configuration, component sharing/separation across sessions, serialization,
 * and DAO handling.
 *
 * @author Michael Ekstrand
 */
public class LenskitRecommenderEngineTest {
    private LenskitRecommenderEngineFactory factory;
    private DAOFactory daoFactory;

    @Before
    public void setup() {
        // An empty in-memory DAO is sufficient for wiring tests.
        daoFactory = new EventCollectionDAO.Factory(Collections.<Event>emptyList());
        factory = new LenskitRecommenderEngineFactory(daoFactory);
    }

    @Test
    public void testBasicRec() throws RecommenderBuildException {
        configureBasicRecommender();

        LenskitRecommenderEngine engine = factory.create();
        verifyBasicRecommender(engine);
    }

    /**
     * Bind the minimal set of components for a working baseline recommender.
     */
    private void configureBasicRecommender() {
        factory.bind(RatingPredictor.class)
               .to(BaselineRatingPredictor.class);
        factory.bind(ItemRecommender.class)
               .to(ScoreBasedItemRecommender.class);
        factory.bind(BaselinePredictor.class)
               .to(ConstantPredictor.class);
    }

    /**
     * Assert that an engine built by {@link #configureBasicRecommender()} resolves
     * every component to the expected implementation.
     */
    private void verifyBasicRecommender(LenskitRecommenderEngine engine) {
        LenskitRecommender rec = engine.open();
        try {
            assertThat(rec.getItemRecommender(),
                       instanceOf(ScoreBasedItemRecommender.class));
            assertThat(rec.getItemScorer(),
                       instanceOf(BaselineRatingPredictor.class));
            assertThat(rec.getRatingPredictor(),
                       instanceOf(BaselineRatingPredictor.class));
            assertThat(rec.get(BaselinePredictor.class),
                       instanceOf(ConstantPredictor.class));
        } finally {
            rec.close();
        }
    }

    @Test
    public void testArbitraryRoot() throws RecommenderBuildException {
        factory.bind(BaselinePredictor.class)
               .to(ConstantPredictor.class);
        factory.addRoot(BaselinePredictor.class);

        LenskitRecommenderEngine engine = factory.create();
        LenskitRecommender rec = engine.open();
        try {
            assertThat(rec.get(BaselinePredictor.class),
                       instanceOf(ConstantPredictor.class));
        } finally {
            rec.close();
        }
    }

    @Test
    public void testSeparatePredictor() throws RecommenderBuildException {
        factory.bind(BaselinePredictor.class)
               .to(GlobalMeanPredictor.class);
        factory.bind(RatingPredictor.class)
               .to(BaselineRatingPredictor.class);

        LenskitRecommenderEngine engine = factory.create();

        LenskitRecommender rec1 = engine.open();
        LenskitRecommender rec2 = engine.open();
        try {
            assertThat(rec1.getRatingPredictor(),
                       instanceOf(BaselineRatingPredictor.class));
            assertThat(rec2.getRatingPredictor(),
                       instanceOf(BaselineRatingPredictor.class));

            // verify that recommenders have different predictors
            assertThat(rec1.getRatingPredictor(),
                       not(sameInstance(rec2.getRatingPredictor())));

            // verify that recommenders have same baseline
            assertThat(rec1.get(BaselinePredictor.class),
                       sameInstance(rec2.get(BaselinePredictor.class)));
        } finally {
            rec1.close();
            rec2.close();
        }
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testParameter() throws RecommenderBuildException {
        factory.set(ThresholdValue.class).to(0.01);
        factory.addRoot(ThresholdStoppingCondition.class);
        LenskitRecommenderEngine engine = factory.create();
        LenskitRecommender rec = engine.open();
        // FIX: close the recommender session like every other test; the
        // original version leaked it.
        try {
            ThresholdStoppingCondition stop = rec.get(ThresholdStoppingCondition.class);
            assertThat(stop, notNullValue());
            assertThat(stop.getThreshold(),
                       closeTo(0.01, 1.0e-6));
        } finally {
            rec.close();
        }
    }

    /**
     * Assert that the node's satisfaction, if it is an instance satisfaction,
     * is not an {@link EventCollectionDAO} (i.e. no DAO instance was captured
     * in the dependency graph).
     */
    @SuppressWarnings({"rawtypes"})
    private void assertNodeNotEVDao(Node node) {
        CachedSatisfaction lbl = node.getLabel();
        if (lbl == null) {
            return;
        }
        Satisfaction sat = lbl.getSatisfaction();
        if (sat instanceof InstanceSatisfaction) {
            assertThat((Class) sat.getErasedType(),
                       not(equalTo((Class) EventCollectionDAO.class)));
        }
    }

    /**
     * Test that no instance satisfaction contains an event collection DAO reference.
     */
    @Test
    public void testBasicNoInstance() throws RecommenderBuildException, IOException, ClassNotFoundException {
        configureBasicRecommender();

        LenskitRecommenderEngine engine = factory.create();

        Graph g = engine.getDependencies();
        // make sure we have no record of an instance dao
        for (Node n: g.getNodes()) {
            assertNodeNotEVDao(n);
            for (Edge e: g.getOutgoingEdges(n)) {
                assertNodeNotEVDao(e.getTail());
            }
            for (Edge e: g.getIncomingEdges(n)) {
                assertNodeNotEVDao(e.getHead());
            }
        }
    }

    @Test
    public void testSerialize() throws RecommenderBuildException, IOException, ClassNotFoundException {
        configureBasicRecommender();

        LenskitRecommenderEngine engine = factory.create();
        // Round-trip the engine through a temp file and verify it still works.
        File tfile = File.createTempFile("lenskit", "engine");
        try {
            engine.write(tfile);
            LenskitRecommenderEngine e2 = LenskitRecommenderEngine.load(daoFactory, tfile);
            verifyBasicRecommender(e2);
        } finally {
            tfile.delete();
        }
    }

    /**
     * Verify that we can inject subclassed DAOs.
     */
    @Test
    public void testSubclassedDAO() throws RecommenderBuildException {
        factory.addRoot(SubclassedDAODepComponent.class);
        LenskitRecommenderEngine engine = factory.create();
        LenskitRecommender rec = engine.open();
        try {
            SubclassedDAODepComponent dep = rec.get(SubclassedDAODepComponent.class);
            assertThat(dep, notNullValue());
            assertThat(dep.dao, notNullValue());
        } finally {
            rec.close();
        }
    }

    /** Component depending on a concrete DAO subclass, used by {@link #testSubclassedDAO()}. */
    public static class SubclassedDAODepComponent {
        private final EventCollectionDAO dao;

        @Inject
        public SubclassedDAODepComponent(EventCollectionDAO dao) {
            this.dao = dao;
        }
    }
}
package org.sagebionetworks.repo.model.jdo;

import java.lang.reflect.Field;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.FieldTypeDAO;
import org.sagebionetworks.repo.model.InvalidModelException;
import org.sagebionetworks.repo.model.Node;
import org.sagebionetworks.repo.model.jdo.persistence.JDOAnnotationType;
import org.sagebionetworks.repo.model.query.FieldType;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.jdo.JdoTemplate;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

/**
 * Represents the annotation types associated with each annotation name.
 *
 * @author jmhill
 *
 */
@Transactional(readOnly = true)
public class JDOFieldTypeDAOImpl implements FieldTypeDAO, InitializingBean {

	@Autowired
	JdoTemplate jdoTemplate;

	// Allowable annotation-name characters: letters, digits, underscore and dot.
	// FIX: the previous class "[a-z,A-Z,0-9,_,.]" accidentally also allowed
	// commas, contradicting the error message in checkKeyName().
	private static final Pattern ALLOWABLE_CHARS = Pattern.compile("^[a-zA-Z0-9_.]+$");

	/**
	 * Since the types never change once they are set, we can safely cache the results.
	 */
	private Map<String, FieldType> localCache = Collections.synchronizedMap(new HashMap<String, FieldType>());

	public JDOFieldTypeDAOImpl(){
	}

	/**
	 * Used for a mocking unit test.
	 * @param mockTemplate
	 */
	public JDOFieldTypeDAOImpl(JdoTemplate mockTemplate) {
		jdoTemplate = mockTemplate;
	}

	/**
	 * Register the type for an annotation name, or validate it against the
	 * already-registered type.
	 *
	 * @return true if the name was already registered (and the type matches),
	 *         false if a new mapping was created
	 * @throws DatastoreException if the name is already registered with a
	 *         different type
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	@Override
	public boolean addNewType(String name, FieldType type) throws DatastoreException, InvalidModelException {
		if(name == null) throw new IllegalArgumentException("Name cannot be null");
		if(type == null) throw new IllegalArgumentException("FieldType cannot be null");
		// check the name
		name = checkKeyName(name);
		// First check the local cache
		// FIX: the cache is keyed by annotation name, but the original code
		// looked it up with type.name(), so the cache never hit correctly.
		FieldType currentType = localCache.get(name);
		if(currentType != null){
			validateType(name, type, currentType);
			return true;
		}
		// First determine if this type already exists
		try {
			JDOAnnotationType exists = jdoTemplate.getObjectById(JDOAnnotationType.class, name);
			currentType = FieldType.valueOf(exists.getTypeClass());
			validateType(name, type, currentType);
			// Remember the confirmed mapping so later calls hit the cache.
			localCache.put(name, currentType);
			return true;
		} catch (Exception e) {
			// NOTE(review): any failure (not only "not found") is treated as
			// "does not exist" and triggers creation — consider narrowing.
			// this means the type does not exist so create it
			JDOAnnotationType jdoType = new JDOAnnotationType();
			jdoType.setAttributeName(name);
			jdoType.setTypeClass(type.toString());
			jdoTemplate.makePersistent(jdoType);
			// Add this to the local map
			localCache.put(name, type);
			return false;
		}
	}

	/**
	 * Validate that the passed type matches the current type.
	 * @param name
	 * @param newType
	 * @param currentType
	 * @throws DatastoreException if the types differ
	 */
	private static void validateType(String name, FieldType newType, FieldType currentType) throws DatastoreException {
		if (newType != currentType) {
			throw new DatastoreException("The annotation name: " + name
					+ " cannot be used for a type of: " + newType.name()
					+ " because it has already been used for a type of: "
					+ currentType.name());
		}
	}

	/**
	 * Look up the registered type for an annotation name, consulting the local
	 * cache first and falling back to the database.
	 *
	 * @return the registered type, or {@link FieldType#DOES_NOT_EXIST} if none
	 */
	@Transactional(readOnly = true)
	@Override
	public FieldType getTypeForName(String name) {
		// Since the values never change we can first look it up in the local
		// cache
		FieldType type = localCache.get(name);
		if (type != null) return type;
		// Look it up in the db
		try {
			JDOAnnotationType exists = jdoTemplate.getObjectById(JDOAnnotationType.class, name);
			type = FieldType.valueOf(exists.getTypeClass());
			// Add this to the local cache
			localCache.put(name, type);
			return type;
		} catch (Exception e) {
			// this means the type does not exist
			return FieldType.DOES_NOT_EXIST;
		}
	}

	/** Remove the mapping for a name from both the database and the cache. */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	@Override
	public void delete(String name) {
		JDOAnnotationType mapping = jdoTemplate.getObjectById(JDOAnnotationType.class, name);
		jdoTemplate.deletePersistent(mapping);
		localCache.remove(name);
	}

	/**
	 * Validate the name
	 * @param key
	 * @return the trimmed, validated key
	 * @throws InvalidModelException if the key is null, empty, or contains
	 *         characters other than letters, numbers, '_' and '.'
	 */
	static String checkKeyName(String key) throws InvalidModelException {
		if(key == null) throw new InvalidModelException("Annotation names cannot be null");
		key = key.trim();
		if("".equals(key)) throw new InvalidModelException("Annotation names cannot be empty strings");
		Matcher matcher = ALLOWABLE_CHARS.matcher(key);
		if (!matcher.matches()) {
			throw new InvalidModelException("Invalid annotation name: '"+key+"'. Annotation names may only contain: letters, numbers, '_' and '.'");
		}
		return key;
	}

	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	@Override
	public void afterPropertiesSet() throws Exception {
		// Make sure the primary Node fields are in place
		Field[] fields = Node.class.getDeclaredFields();
		for(Field field: fields){
			this.addNewType(field.getName(), FieldType.PRIMARY_FIELD);
		}
	}

}
package com.nhl.link.move.runtime.task.create; import com.nhl.link.move.CreateBuilder; import com.nhl.link.move.LmTask; import com.nhl.link.move.annotation.AfterSourceRowsConverted; import com.nhl.link.move.annotation.AfterTargetsCommitted; import com.nhl.link.move.annotation.AfterTargetsMapped; import com.nhl.link.move.annotation.AfterTargetsMerged; import com.nhl.link.move.extractor.model.ExtractorName; import com.nhl.link.move.runtime.cayenne.ITargetCayenneService; import com.nhl.link.move.runtime.extractor.IExtractorService; import com.nhl.link.move.runtime.task.BaseTaskBuilder; import com.nhl.link.move.runtime.task.ListenersBuilder; import com.nhl.link.move.runtime.task.createorupdate.RowConverter; import com.nhl.link.move.runtime.token.ITokenManager; import org.apache.cayenne.DataObject; /** * @param <T> * @since 2.6 */ public class DefaultCreateBuilder<T extends DataObject> extends BaseTaskBuilder implements CreateBuilder<T> { private CreateTargetMapper<T> mapper; private CreateTargetMerger<T> merger; private ITokenManager tokenManager; private ExtractorName extractorName; private ListenersBuilder stageListenersBuilder; private IExtractorService extractorService; private ITargetCayenneService targetCayenneService; private RowConverter rowConverter; public DefaultCreateBuilder( CreateTargetMapper<T> mapper, CreateTargetMerger<T> merger, RowConverter rowConverter, ITargetCayenneService targetCayenneService, IExtractorService extractorService, ITokenManager tokenManager) { this.mapper = mapper; this.merger = merger; this.tokenManager = tokenManager; this.extractorService = extractorService; this.targetCayenneService = targetCayenneService; this.rowConverter = rowConverter; this.stageListenersBuilder = createListenersBuilder(); // always add stats listener.. 
stageListener(CreateStatsListener.instance()); } ListenersBuilder createListenersBuilder() { return new ListenersBuilder( AfterSourceRowsConverted.class, AfterTargetsMapped.class, AfterTargetsMerged.class, AfterTargetsCommitted.class); } @Override public CreateBuilder<T> sourceExtractor(String location, String name) { this.extractorName = ExtractorName.create(location, name); return this; } @Override public CreateBuilder<T> batchSize(int batchSize) { this.batchSize = batchSize; return this; } @Override public CreateBuilder<T> stageListener(Object listener) { stageListenersBuilder.addListener(listener); return this; } @Override public LmTask task() throws IllegalStateException { if (extractorName == null) { throw new IllegalStateException("Required 'extractorName' is not set"); } return new CreateTask<T>(extractorName, batchSize, targetCayenneService, extractorService, tokenManager, createProcessor()); } private CreateSegmentProcessor<T> createProcessor() { return new CreateSegmentProcessor<>(rowConverter, mapper, merger, stageListenersBuilder.getListeners()); } }
package io.subutai.common.task; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import io.subutai.common.host.HostArchitecture; import io.subutai.common.peer.ContainerSize; import io.subutai.common.settings.Common; public class CloneRequest implements TaskRequest { private final String resourceHostId; private final String hostname; private final String containerName; private final String ip; private final String environmentId; private final String initiatorPeerId; private final String ownerId; private final String templateName; private final HostArchitecture templateArch; private final ContainerSize containerSize; public CloneRequest( final String resourceHostId, final String hostname, final String containerName, final String ip, final String environmentId, final String initiatorPeerId, final String ownerId, final String templateName, HostArchitecture templateArch, final ContainerSize containerSize ) { Preconditions.checkNotNull( resourceHostId ); Preconditions.checkArgument( !Strings.isNullOrEmpty( hostname ) ); Preconditions.checkNotNull( templateName ); Preconditions.checkArgument( !Strings.isNullOrEmpty( ip ) && ip.matches( Common.CIDR_REGEX ) ); this.resourceHostId = resourceHostId; this.hostname = hostname; this.containerName = containerName; this.ip = ip; this.environmentId = environmentId; this.initiatorPeerId = initiatorPeerId; this.ownerId = ownerId; this.templateName = templateName; this.templateArch = templateArch; this.containerSize = containerSize; } public String getResourceHostId() { return resourceHostId; } public String getHostname() { return hostname; } public String getContainerName() { return containerName; } public String getIp() { return ip; } public String getEnvironmentId() { return environmentId; } public String getInitiatorPeerId() { return initiatorPeerId; } public String getOwnerId() { return ownerId; } public String getTemplateName() { return templateName; } public HostArchitecture getTemplateArch() 
{ return templateArch; } public ContainerSize getContainerSize() { return containerSize; } @Override public String toString() { final StringBuffer sb = new StringBuffer( "CloneRequest{" ); sb.append( "resourceHostId='" ).append( resourceHostId ).append( '\'' ); sb.append( ", hostname='" ).append( hostname ).append( '\'' ); sb.append( ", containerName='" ).append( containerName ).append( '\'' ); sb.append( ", ip='" ).append( ip ).append( '\'' ); sb.append( ", environmentId='" ).append( environmentId ).append( '\'' ); sb.append( ", initiatorPeerId='" ).append( initiatorPeerId ).append( '\'' ); sb.append( ", ownerId='" ).append( ownerId ).append( '\'' ); sb.append( ", templateName='" ).append( templateName ).append( '\'' ); sb.append( ", templateArch=" ).append( templateArch ); sb.append( ", containerSize=" ).append( containerSize ); sb.append( '}' ); return sb.toString(); } }
package org.mockserver.mock.action;

import org.mockserver.model.HttpClassCallback;
import org.mockserver.model.HttpRequest;
import org.mockserver.model.HttpResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

import static org.mockserver.model.HttpResponse.notFoundResponse;

/**
 * Handles "class callback" expectations: loads the configured
 * {@link ExpectationCallback} implementation by name, instantiates it via its
 * default constructor, and delegates the request to it. Any failure to load or
 * instantiate the class results in a 404 response.
 *
 * @author jamesdbloom
 */
public class HttpCallbackActionHandler {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    public HttpResponse handle(HttpClassCallback httpClassCallback, HttpRequest httpRequest) {
        return sendRequest(httpClassCallback, httpRequest);
    }

    /**
     * Loads and instantiates the callback class named in the expectation.
     *
     * @return the callback instance, or null if the class cannot be loaded,
     *         is not an ExpectationCallback, or cannot be instantiated
     */
    private ExpectationCallback instantiateCallback(HttpClassCallback httpClassCallback) {
        try {
            // Class<?> instead of the raw Class type; asSubclass() replaces the
            // previous unchecked assignment.
            Class<?> expectationCallbackClass = Class.forName(httpClassCallback.getCallbackClass());
            if (ExpectationCallback.class.isAssignableFrom(expectationCallbackClass)) {
                Constructor<? extends ExpectationCallback> constructor =
                        expectationCallbackClass.asSubclass(ExpectationCallback.class).getConstructor();
                return constructor.newInstance();
            }
        } catch (ClassNotFoundException e) {
            logger.error("ClassNotFoundException - while trying to instantiate ExpectationCallback class \"" + httpClassCallback.getCallbackClass() + "\"", e);
        } catch (NoSuchMethodException e) {
            logger.error("NoSuchMethodException - while trying to create default constructor on ExpectationCallback class \"" + httpClassCallback.getCallbackClass() + "\"", e);
        } catch (InvocationTargetException e) {
            logger.error("InvocationTargetException - while trying to execute default constructor on ExpectationCallback class \"" + httpClassCallback.getCallbackClass() + "\"", e);
        } catch (InstantiationException e) {
            // FIX: message previously mis-reported this as InvocationTargetException.
            logger.error("InstantiationException - while trying to execute default constructor on ExpectationCallback class \"" + httpClassCallback.getCallbackClass() + "\"", e);
        } catch (IllegalAccessException e) {
            // FIX: message previously mis-reported this as InvocationTargetException.
            logger.error("IllegalAccessException - while trying to execute default constructor on ExpectationCallback class \"" + httpClassCallback.getCallbackClass() + "\"", e);
        }
        return null;
    }

    /** Delegates the request to the callback; returns 404 on any failure. */
    private HttpResponse sendRequest(HttpClassCallback httpClassCallback, HttpRequest httpRequest) {
        if (httpRequest != null) {
            ExpectationCallback expectationCallback = instantiateCallback(httpClassCallback);
            if (expectationCallback != null) {
                return expectationCallback.handle(httpRequest);
            } else {
                return notFoundResponse();
            }
        } else {
            return notFoundResponse();
        }
    }

}
package org.mockserver.maven;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import org.apache.commons.lang3.StringUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.ArtifactResolutionRequest;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.repository.RepositorySystem;
import org.mockserver.cli.Main;
import org.mockserver.configuration.ConfigurationProperties;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Maven goal that starts MockServer in a forked JVM: assembles a java command
 * line (classpath, ports, log level), launches it, waits briefly for startup,
 * and then runs the configured initialization class.
 *
 * @author jamesdbloom
 */
@Mojo(name = "runForked", requiresProject = false, threadSafe = false)
public class MockServerRunForkedMojo extends MockServerAbstractMojo {

    /**
     * Get a list of artifacts used by this plugin
     */
    @Parameter(defaultValue = "${plugin.artifacts}", required = true, readonly = true)
    protected List<Artifact> pluginArtifacts;

    /**
     * Used to look up Artifacts in the remote repository.
     */
    @Component
    protected RepositorySystem repositorySystem;

    /**
     * Used to look up Artifacts in the remote repository.
     */
    @Component
    protected ArtifactResolver artifactResolver;

    private ProcessBuildFactory processBuildFactory = new ProcessBuildFactory();

    /**
     * Normalizes '/' and '\' in a path to the platform's file separator.
     */
    public static String fileSeparators(String path) {
        StringBuilder ret = new StringBuilder();
        for (char c : path.toCharArray()) {
            if ((c == '/') || (c == '\\')) {
                ret.append(File.separatorChar);
            } else {
                ret.append(c);
            }
        }
        return ret.toString();
    }

    public void execute() throws MojoExecutionException {
        if (skip) {
            getLog().info("Skipping plugin execution");
        } else {
            if (getLog().isInfoEnabled()) {
                getLog().info("mockserver:runForked about to start MockServer on: "
                                + (serverPort != -1 ? " serverPort " + serverPort : "")
                                + (proxyPort != -1 ? " proxyPort " + proxyPort : "")
                );
            }
            // Build the forked JVM command line.
            List<String> arguments = new ArrayList<String>(Arrays.asList(getJavaBin()));
            // arguments.add("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5010");
            arguments.add("-Dfile.encoding=UTF-8");
            arguments.add("-Dmockserver.logLevel=" + logLevel);
            arguments.add("-cp");
            // Classpath = mockserver-netty jar-with-dependencies plus any extra
            // plugin dependencies configured by the user.
            String classPath = resolveJarWithDependenciesPath();
            if (dependencies != null && !dependencies.isEmpty()) {
                for (Dependency dependency : dependencies) {
                    classPath += System.getProperty("path.separator");
                    classPath += resolvePluginDependencyJarPath(dependency);
                }
            }
            arguments.add(classPath);
            arguments.add(Main.class.getName());
            if (serverPort != -1) {
                arguments.add("-serverPort");
                arguments.add("" + serverPort);
                ConfigurationProperties.mockServerPort(serverPort);
            }
            if (proxyPort != -1) {
                arguments.add("-proxyPort");
                arguments.add("" + proxyPort);
                ConfigurationProperties.proxyPort(proxyPort);
            }
            getLog().info(" ");
            getLog().info(StringUtils.rightPad("", 72, "-"));
            getLog().info("Running MockServer: " + Joiner.on(" ").join(arguments));
            getLog().info(StringUtils.rightPad("", 72, "-"));
            getLog().info(" ");
            ProcessBuilder processBuilder = processBuildFactory.create(arguments);
            if (pipeLogToConsole) {
                processBuilder.redirectErrorStream(true);
            }
            try {
                processBuilder.start();
            } catch (IOException e) {
                getLog().error("Exception while starting MockServer", e);
            }
            // Give the forked JVM time to start before running initialization.
            try {
                TimeUnit.SECONDS.sleep((timeout == 0 ? 2 : timeout));
            } catch (InterruptedException e) {
                throw new RuntimeException("Exception while waiting for mock server JVM to start", e);
            }
            InstanceHolder.runInitializationClass(serverPort, createInitializer());
        }
    }

    /**
     * Locates the java executable of the current JVM, falling back to plain
     * "java" on the PATH if none is found under java.home.
     */
    @VisibleForTesting
    String getJavaBin() {
        String javaBinary = "java";

        File javaHomeDirectory = new File(System.getProperty("java.home"));
        for (String javaExecutable : new String[]{"java", "java.exe"}) {
            File javaExeLocation = new File(javaHomeDirectory, fileSeparators("bin/" + javaExecutable));
            if (javaExeLocation.exists() && javaExeLocation.isFile()) {
                javaBinary = javaExeLocation.getAbsolutePath();
                break;
            }
        }
        return javaBinary;
    }

    /** Resolves a configured plugin dependency to its local jar path. */
    @VisibleForTesting
    String resolvePluginDependencyJarPath(Dependency dependency) {
        Artifact dependencyArtifact = repositorySystem.createArtifactWithClassifier(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion(), dependency.getType(), dependency.getClassifier());
        artifactResolver.resolve(new ArtifactResolutionRequest().setArtifact(dependencyArtifact));
        return dependencyArtifact.getFile().getAbsolutePath();
    }

    /** Resolves the mockserver-netty jar-with-dependencies to its local path. */
    @VisibleForTesting
    String resolveJarWithDependenciesPath() {
        Artifact jarWithDependencies = repositorySystem.createArtifactWithClassifier("org.mock-server", "mockserver-netty", getVersion(), "jar", "jar-with-dependencies");
        artifactResolver.resolve(new ArtifactResolutionRequest().setArtifact(jarWithDependencies));
        return jarWithDependencies.getFile().getAbsolutePath();
    }

    /**
     * Determines the plugin version from its embedded pom.properties, falling
     * back to a hard-coded default when the resource is missing or unreadable.
     */
    @VisibleForTesting
    String getVersion() {
        String version = "3.9.12";
        // FIX: close the resource stream; the original leaked the InputStream.
        try (InputStream is = getClass().getResourceAsStream("/META-INF/maven/org.mock-server/mockserver-maven-plugin/pom.properties")) {
            if (is != null) {
                java.util.Properties p = new java.util.Properties();
                p.load(is);
                version = p.getProperty("version", "3.9.12");
            }
        } catch (Exception e) {
            // ignore - fall back to the default version
        }
        getLog().info("Using org.mock-server:mockserver-netty:" + version + ":jar-with-dependencies");
        return version;
    }

}
package org.openlmis.report.builder;

import org.openlmis.report.model.params.OrderFillRateReportParam;

import java.util.Map;

import static org.apache.ibatis.jdbc.SqlBuilder.*;
import static org.openlmis.report.builder.helpers.RequisitionPredicateHelper.*;

/**
 * Builds the MyBatis SQL for the "order fill rate" report: per-facility/product
 * approved vs. received quantities and the derived item fill rate percentage.
 * Queries are assembled with the static {@code SqlBuilder} DSL and use MyBatis
 * placeholders (e.g. {@code #{filterCriteria.program}}) bound at execution time.
 */
public class OrderFillRateQueryBuilder {

  /**
   * Entry point used by the report mapper: extracts the filter criteria from the
   * MyBatis parameter map and builds the main detail query.
   */
  public static String getQuery(Map params) {
    OrderFillRateReportParam queryParam = (OrderFillRateReportParam) params.get("filterCriteria");
    return getQueryString(queryParam, queryParam.getUserId());
  }

  /**
   * Appends the shared WHERE predicates (program, period, and the optional
   * zone/facility-type/facility/category/product filters) to the query currently
   * being built by the thread-local {@code SqlBuilder}.
   */
  private static void writePredicates(OrderFillRateReportParam param) {
    WHERE(programIsFilteredBy("programid"));
    WHERE(periodIsFilteredBy("periodid"));
    if (param.getZone() != 0) {
      WHERE(geoZoneIsFilteredBy("gz"));
    }
    if (param.getFacilityType() != 0) {
      WHERE(facilityTypeIsFilteredBy("facilityTypeId"));
    }
    if (param.getFacility() != 0) {
      WHERE(facilityIsFilteredBy("facilityId"));
    }
    if (param.getProductCategory() != 0) {
      WHERE(productCategoryIsFilteredBy("productCategoryId"));
    }
    // Evaluate the product predicate once; the original invoked the helper twice
    // (once for the null check, once for the WHERE clause).
    String productPredicate = multiProductFilterBy(param.getProducts(), "productId", "tracer");
    if (productPredicate != null) {
      WHERE(productPredicate);
    }
  }

  // NOTE(review): userId is unused here — the SQL references #{userId}, which MyBatis
  // presumably binds from the parameter map; confirm before removing the parameter.
  private static String getQueryString(OrderFillRateReportParam param, Long userId) {
    BEGIN();
    SELECT_DISTINCT("facilityname facility,quantityapproved as Approved,quantityreceived receipts ,productcode, product, " +
        " CASE WHEN COALESCE(quantityapproved, 0::numeric) = 0::numeric THEN 0::numeric\n" +
        " ELSE COALESCE(quantityreceived,0 )/ COALESCE(quantityapproved,0) * 100::numeric\n" +
        " END AS item_fill_rate ");
    FROM("vw_order_fill_rate join vw_districts gz on gz.district_id = vw_order_fill_rate.zoneId");
    WHERE("facilityid in (select facility_id from vw_user_facilities where user_id = #{userId} and program_id = #{filterCriteria.program} )");
    WHERE(" status in ('RELEASED') and totalproductsapproved > 0 ");
    writePredicates(param);
    GROUP_BY("product, approved, " +
        " quantityreceived, productcode, " +
        " facilityname ");
    ORDER_BY("facilityname");
    String query = SQL();
    return query;
  }

  /**
   * Counts the distinct products received for the selected facility/period/program.
   */
  public static String getTotalProductsReceived(Map param) {
    OrderFillRateReportParam queryParam = (OrderFillRateReportParam) param.get("filterCriteria");
    BEGIN();
    SELECT("count(totalproductsreceived) quantityreceived");
    FROM("vw_order_fill_rate join vw_districts gz on gz.district_id = zoneId");
    WHERE("facilityid in (select facility_id from vw_user_facilities where user_id = #{userId} and program_id = #{filterCriteria.program})");
    WHERE("totalproductsreceived>0 and totalproductsapproved >0 and status in ('RELEASED') and periodId = #{filterCriteria.period} and programId= #{filterCriteria.program} and facilityId = #{filterCriteria.facility}");
    writePredicates(queryParam);
    GROUP_BY("totalproductsreceived");
    return SQL();
  }

  /**
   * Counts the distinct products approved (ordered) for the selected
   * facility/period/program.
   */
  public static String getTotalProductsOrdered(Map params) {
    OrderFillRateReportParam queryParam = (OrderFillRateReportParam) params.get("filterCriteria");
    BEGIN();
    SELECT("count(totalproductsapproved) quantityapproved");
    FROM("vw_order_fill_rate join vw_districts gz on gz.district_id = zoneId");
    WHERE("facilityid in (select facility_id from vw_user_facilities where user_id = #{userId} and program_id = #{filterCriteria.program})");
    WHERE("totalproductsapproved > 0 and status in ('RELEASED') and periodId= #{filterCriteria.period} and programId= #{filterCriteria.program} and facilityId= #{filterCriteria.facility} ");
    writePredicates(queryParam);
    return SQL();
  }

  /**
   * Builds the summary query: received-count UNION approved-count, used for the
   * report header totals.
   */
  public static String getSummaryQuery(Map params) {
    OrderFillRateReportParam queryParam = (OrderFillRateReportParam) params.get("filterCriteria");
    BEGIN();
    SELECT("count(totalproductsreceived) quantityreceived");
    FROM("vw_order_fill_rate join vw_districts gz on gz.district_id = zoneId");
    WHERE("facilityid in (select facility_id from vw_user_facilities where user_id = #{userId} and program_id = #{filterCriteria.program})");
    WHERE("totalproductsreceived>0 and totalproductsapproved > 0 and status in ('RELEASED') and periodId= #{filterCriteria.period} and programId= #{filterCriteria.program} and facilityId= #{filterCriteria.facility}");
    writePredicates(queryParam);
    GROUP_BY("totalproductsreceived");
    String query = SQL();
    RESET();
    BEGIN();
    SELECT("count(totalproductsapproved) quantityapproved");
    FROM("vw_order_fill_rate join vw_districts gz on gz.district_id = zoneId");
    WHERE("status in ('RELEASED') and totalproductsapproved > 0 and periodId= #{filterCriteria.period} and programId= #{filterCriteria.program} and facilityId= #{filterCriteria.facility}");
    writePredicates(queryParam);
    query += " UNION " + SQL();
    return query;
  }
}
package org.project.openbaton.catalogue.nfvo; public enum Action { GRANT_OPERATION, ALLOCATE_RESOURCES, SCALE, SCALING, ERROR, RELEASE_RESOURCES, INSTANTIATE, MODIFY, SCALE_IN_FINISHED, SCALE_OUT_FINISHED, SCALE_UP_FINISHED, SCALE_DOWN_FINISHED, RELEASE_RESOURCES_FINISH, INSTANTIATE_FINISH, CONFIGURE, START; }
// This file is part of the OpenNMS(R) Application.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// and included code are below.
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
// This program is free software; you can redistribute it and/or modify
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
// For more information contact:
// Tab Size = 8
package org.opennms.netmgt.poller.monitors;

import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Category;
import org.opennms.core.utils.CollectionMath;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.model.PollStatus;
import org.opennms.netmgt.ping.Pinger;
import org.opennms.netmgt.poller.Distributable;
import org.opennms.netmgt.poller.DistributionContext;
import org.opennms.netmgt.poller.MonitoredService;
import org.opennms.netmgt.poller.NetworkInterface;
import org.opennms.netmgt.poller.NetworkInterfaceNotSupportedException;
import org.opennms.netmgt.utils.ParameterMap;

// this is marked not distributable because it relies on a shared library
@Distributable(DistributionContext.DAEMON)
final public class MultiIcmpMonitor extends IPv4Monitor {

    /** Default number of pings sent per poll when the "pings" parameter is absent. */
    private static final int DEFAULT_MULTI_PING_COUNT = 20;

    /** Default inter-ping interval in milliseconds ("interval" parameter). */
    private static final long DEFAULT_PING_INTERVAL = 50;

    /**
     * Constructs a new monitor.
     */
    public MultiIcmpMonitor() throws IOException {
    }

    /**
     * <P>
     * Poll the specified address for ICMP service availability by sending a
     * burst of parallel pings and recording per-ping response times plus
     * loss/median/average statistics on the returned status.
     * </P>
     *
     * @param parameters
     *            The package parameters (timeout, retry, etc...) to be used for
     *            this poll.
     * @param svc
     *            The monitored service carrying the network interface to test.
     * @return The availability of the interface and if a transition event
     *         should be suppressed.
     */
    public PollStatus poll(MonitoredService svc, Map parameters) {
        NetworkInterface iface = svc.getNetInterface();

        // Get interface address from NetworkInterface; only IPv4 is supported.
        if (iface.getType() != NetworkInterface.TYPE_IPV4)
            throw new NetworkInterfaceNotSupportedException("Unsupported interface type, only TYPE_IPV4 currently supported");

        Category log = ThreadCategory.getInstance(this.getClass());
        PollStatus serviceStatus = PollStatus.unavailable();
        InetAddress host = (InetAddress) iface.getAddress();
        List<Number> responseTimes = null;

        try {
            // get parameters
            long timeout = ParameterMap.getKeyedLong(parameters, "timeout", Pinger.DEFAULT_TIMEOUT);
            int count = ParameterMap.getKeyedInteger(parameters, "pings", DEFAULT_MULTI_PING_COUNT);
            long pingInterval = ParameterMap.getKeyedLong(parameters, "interval", DEFAULT_PING_INTERVAL);

            responseTimes = new ArrayList<Number>(Pinger.parallelPing(host, count, timeout, pingInterval));
            serviceStatus = PollStatus.available();

            // Sort ascending with null entries (lost pings) first. The original
            // comparator returned -1 for compare(null, null), violating the
            // Comparator contract (must be 0 and antisymmetric); fixed here.
            Collections.sort(responseTimes, new Comparator<Number>() {
                public int compare(Number arg0, Number arg1) {
                    if (arg0 == null && arg1 == null) {
                        return 0;
                    } else if (arg0 == null) {
                        return -1;
                    } else if (arg1 == null) {
                        return 1;
                    } else {
                        return Double.compare(arg0.doubleValue(), arg1.doubleValue());
                    }
                }
            });

            // Publish per-ping times plus aggregate statistics; insertion order
            // is preserved by the LinkedHashMap for downstream consumers.
            Map<String, Number> returnval = new LinkedHashMap<String, Number>();
            for (int i = 0; i < responseTimes.size(); i++) {
                returnval.put("ping" + (i + 1), responseTimes.get(i));
            }
            returnval.put("loss", CollectionMath.countNull(responseTimes));
            returnval.put("median", CollectionMath.median(responseTimes));
            returnval.put("response-time", CollectionMath.average(responseTimes));
            serviceStatus.setProperties(returnval);
        } catch (Exception e) {
            // Best-effort poll: any failure leaves the status "unavailable".
            log.debug("failed to ping " + host, e);
        }
        return serviceStatus;
    }
}
package org.eclipse.xtext.serializer.tokens;

import java.util.List;

import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.xtext.CrossReference;
import org.eclipse.xtext.EcoreUtil2;
import org.eclipse.xtext.GrammarUtil;
import org.eclipse.xtext.conversion.IValueConverterService;
import org.eclipse.xtext.conversion.ValueConverterException;
import org.eclipse.xtext.linking.impl.LinkingHelper;
import org.eclipse.xtext.naming.IQualifiedNameConverter;
import org.eclipse.xtext.naming.QualifiedName;
import org.eclipse.xtext.nodemodel.INode;
import org.eclipse.xtext.parsetree.reconstr.impl.TokenUtil;
import org.eclipse.xtext.resource.IEObjectDescription;
import org.eclipse.xtext.scoping.IScope;
import org.eclipse.xtext.scoping.IScopeProvider;
import org.eclipse.xtext.serializer.diagnostic.ISerializationDiagnostic;
import org.eclipse.xtext.serializer.diagnostic.ISerializationDiagnostic.Acceptor;
import org.eclipse.xtext.serializer.diagnostic.ITokenDiagnosticProvider;

import com.google.common.collect.Lists;
import com.google.inject.Inject;

/**
 * Serializes cross-references back to text: given a semantic object and the
 * grammar {@link CrossReference} it was parsed through, finds a name for the
 * referenced target in the serializer scope and converts it with the rule's
 * value converter. Errors are reported through the {@link Acceptor}.
 *
 * @author Moritz Eysholdt - Initial contribution and API
 */
public class CrossReferenceSerializer implements ICrossReferenceSerializer {

	@Inject
	protected ITokenDiagnosticProvider diagnostics;

	@Inject
	private LinkingHelper linkingHelper;

	@Inject
	private IQualifiedNameConverter qualifiedNameConverter;

	// bound to the serializer-specific scope provider, not the default linking one
	@Inject
	@SerializerScopeProviderBinding
	private IScopeProvider scopeProvider;

	@Inject
	protected TokenUtil tokenUtil;

	@Inject
	private IValueConverterService valueConverter;

	/**
	 * Returns true if a textual name for {@code target} can be produced.
	 * For unresolved/null targets with an existing parse node, validity means the
	 * node was parsed through this very cross-reference (the node text will be
	 * reused verbatim by {@link #serializeCrossRef}).
	 */
	@Override
	public boolean isValid(EObject semanticObject, CrossReference crossref, EObject target, INode node, Acceptor errors) {
		if ((target == null || target.eIsProxy()) && node != null) {
			CrossReference crossrefFromNode = GrammarUtil.containingCrossReference(node.getGrammarElement());
			return crossref == crossrefFromNode;
		}
		final EReference ref = GrammarUtil.getReference(crossref, semanticObject.eClass());
		final IScope scope = scopeProvider.getScope(semanticObject, ref);
		if (scope == null) {
			if (errors != null)
				errors.accept(diagnostics.getNoScopeFoundDiagnostic(semanticObject, crossref, target));
			return false;
		}
		if (target != null && target.eIsProxy()) {
			target = handleProxy(target, semanticObject, ref);
		}
		return getCrossReferenceNameFromScope(semanticObject, crossref, target, scope, errors) != null;
	}

	/**
	 * Produces the text for the cross-reference. Preference order:
	 * (1) reuse the original node text for unresolved targets;
	 * (2) reuse the node text if it still denotes the same target in the current
	 *     scope (keeps the user's original spelling);
	 * (3) otherwise compute a fresh name from the scope.
	 * Returns null (after reporting via {@code errors}) when no name is found.
	 */
	@Override
	public String serializeCrossRef(EObject semanticObject, CrossReference crossref, EObject target, INode node, Acceptor errors) {
		if ((target == null || target.eIsProxy()) && node != null)
			return tokenUtil.serializeNode(node);
		final EReference ref = GrammarUtil.getReference(crossref, semanticObject.eClass());
		final IScope scope = scopeProvider.getScope(semanticObject, ref);
		if (scope == null) {
			if (errors != null)
				errors.accept(diagnostics.getNoScopeFoundDiagnostic(semanticObject, crossref, target));
			return null;
		}
		if (target != null && target.eIsProxy()) {
			target = handleProxy(target, semanticObject, ref);
		}
		if (target != null && node != null) {
			// Check whether the existing node text still resolves to the same target URI;
			// if so, keep the original text rather than recomputing a (possibly different) name.
			String text = linkingHelper.getCrossRefNodeAsString(node, true);
			QualifiedName qn = qualifiedNameConverter.toQualifiedName(text);
			URI targetURI = EcoreUtil2.getPlatformResourceOrNormalizedURI(target);
			for (IEObjectDescription desc : scope.getElements(qn)) {
				if (targetURI.equals(desc.getEObjectURI()))
					return tokenUtil.serializeNode(node);
			}
		}
		return getCrossReferenceNameFromScope(semanticObject, crossref, target, scope, errors);
	}

	/**
	 * Returns the first scope-provided name for {@code target} that the value
	 * converter accepts for the cross-reference's rule. Value-conversion failures
	 * are recorded and only reported if no candidate succeeds; if the scope has no
	 * description for the target at all, a "not found" diagnostic is reported.
	 */
	protected String getCrossReferenceNameFromScope(EObject semanticObject, CrossReference crossref, EObject target,
			final IScope scope, Acceptor errors) {
		String ruleName = linkingHelper.getRuleNameFrom(crossref);
		boolean foundOne = false;
		List<ISerializationDiagnostic> recordedErrors = null;
		for (IEObjectDescription desc : scope.getElements(target)) {
			foundOne = true;
			String unconverted = qualifiedNameConverter.toString(desc.getName());
			try {
				return valueConverter.toString(unconverted, ruleName);
			} catch (ValueConverterException e) {
				// defer reporting — a later candidate may still convert successfully
				if (errors != null) {
					if (recordedErrors == null)
						recordedErrors = Lists.newArrayList();
					recordedErrors.add(diagnostics.getValueConversionExceptionDiagnostic(semanticObject, crossref,
							unconverted, e));
				}
			}
		}
		if (errors != null) {
			if (recordedErrors != null)
				for (ISerializationDiagnostic diag : recordedErrors)
					errors.accept(diag);
			if (!foundOne)
				errors.accept(diagnostics.getNoEObjectDescriptionFoundDiagnostic(semanticObject, crossref, target,
						scope));
		}
		return null;
	}

	/**
	 * Resolves a proxy target. If the owning reference resolves proxies, re-reads
	 * the value from {@code semanticObject} (which triggers EMF resolution);
	 * otherwise falls back to {@link EcoreUtil#resolve}.
	 *
	 * @since 2.11
	 */
	protected EObject handleProxy(EObject proxy, EObject semanticObject, EReference reference) {
		if (reference != null && reference.isResolveProxies()) {
			if (reference.isMany()) {
				@SuppressWarnings("unchecked")
				EList<? extends EObject> list = (EList<? extends EObject>) semanticObject.eGet(reference);
				int index = list.indexOf(proxy);
				if (index >= 0)
					return list.get(index);
			} else {
				return (EObject) semanticObject.eGet(reference, true);
			}
		}
		return EcoreUtil.resolve(proxy, semanticObject);
	}
}
package org.mwc.debrief.track_shift.views; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Paint; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.Enumeration; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import java.util.Vector; import org.eclipse.core.commands.ExecutionException; import org.eclipse.core.runtime.IStatus; import org.jfree.data.general.Series; import org.jfree.data.general.SeriesException; import org.jfree.data.time.FixedMillisecond; import org.jfree.data.time.RegularTimePeriod; import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesCollection; import org.jfree.data.time.TimeSeriesDataItem; import org.jfree.util.ShapeUtilities; import org.mwc.cmap.core.CorePlugin; import org.mwc.debrief.core.ContextOperations.GenerateTMASegmentFromCuts.TMAfromCuts; import org.mwc.debrief.track_shift.controls.ZoneChart; import org.mwc.debrief.track_shift.controls.ZoneChart.ColorProvider; import org.mwc.debrief.track_shift.controls.ZoneChart.Zone; import org.mwc.debrief.track_shift.controls.ZoneChart.ZoneSlicer; import org.mwc.debrief.track_shift.zig_detector.ArtificalLegDetector; import org.mwc.debrief.track_shift.zig_detector.IOwnshipLegDetector; import org.mwc.debrief.track_shift.zig_detector.Precision; import org.mwc.debrief.track_shift.zig_detector.ownship.LegOfData; import org.mwc.debrief.track_shift.zig_detector.ownship.PeakTrackingOwnshipLegDetector; import Debrief.GUI.Frames.Application; import Debrief.ReaderWriter.Replay.ImportReplay; import Debrief.Wrappers.FixWrapper; import Debrief.Wrappers.ISecondaryTrack; import Debrief.Wrappers.SensorContactWrapper; import 
Debrief.Wrappers.SensorWrapper; import Debrief.Wrappers.TrackWrapper; import Debrief.Wrappers.Track.Doublet; import Debrief.Wrappers.Track.DynamicInfillSegment; import Debrief.Wrappers.Track.RelativeTMASegment; import Debrief.Wrappers.Track.TrackSegment; import Debrief.Wrappers.Track.TrackWrapper_Support.SegmentList; import MWC.Algorithms.FrequencyCalcs; import MWC.GUI.BaseLayer; import MWC.GUI.Editable; import MWC.GUI.ErrorLogger; import MWC.GUI.Layers; import MWC.GUI.LoggingService; import MWC.GUI.PlainWrapper; import MWC.GUI.JFreeChart.ColourStandardXYItemRenderer; import MWC.GUI.JFreeChart.ColouredDataItem; import MWC.GenericData.HiResDate; import MWC.GenericData.TimePeriod; import MWC.GenericData.Watchable; import MWC.GenericData.WatchableList; import MWC.GenericData.WorldDistance; import MWC.GenericData.WorldDistance.ArrayLength; import MWC.GenericData.WorldLocation; import MWC.GenericData.WorldSpeed; import MWC.GenericData.WorldVector; import MWC.TacticalData.Fix; import MWC.TacticalData.TrackDataProvider; import MWC.Utilities.TextFormatting.DebriefFormatDateTime; import junit.framework.TestCase; public final class StackedDotHelper { /** * special listener, that knows how to detatch itself * * @author ian * */ private abstract static class PrivatePropertyChangeListener implements PropertyChangeListener { private final TrackWrapper _track; private final String _property; public PrivatePropertyChangeListener(final TrackWrapper track, final String property) { _track = track; _property = property; } public void detach() { _track.removePropertyChangeListener(_property, this); } } /** * convenience class, to avoid having to pass plot into data helper * */ public static interface SetBackgroundShade { void setShade(final Paint errorColor); } /** * we have a special multistatics use case where we need to support multiple primary tracks. 
* */ public static interface SwitchableTrackProvider { /** * find out what the primary tracks are */ public WatchableList[] getPrimaryTracks(); /** * find out what the secondary track is */ public WatchableList[] getSecondaryTracks(); /** * whether we have any valid data * * @return yes/no */ public boolean isPopulated(); } private static class TargetDoublet { public TrackSegment targetParent; public FixWrapper targetFix; } public static class TestSlicing extends TestCase { public void testOSLegDetector() { final TimeSeries osC = new TimeSeries(new FixedMillisecond()); long time = 0; osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 21d); osC.add(new FixedMillisecond(time++), 22d); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 21d); osC.add(new FixedMillisecond(time++), 20d); assertFalse(containsIdenticalValues(osC, 3)); // inject some more duplicates osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 20d); assertTrue(containsIdenticalValues(osC, 3)); osC.clear(); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 21d); osC.add(new FixedMillisecond(time++), 21d); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 21d); osC.add(new FixedMillisecond(time++), 21d); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 20d); assertFalse("check we're verifying single runs of matches", containsIdenticalValues(osC, 3)); osC.add(new FixedMillisecond(time++), 20d); osC.add(new FixedMillisecond(time++), 20d); assertTrue(containsIdenticalValues(osC, 3)); } public void testSetLeg() { final TrackWrapper host = new TrackWrapper(); host.setName("Host Track"); // create a sensor final SensorWrapper sensor = new SensorWrapper("Sensor"); sensor.setHost(host); 
host.add(sensor); // add some cuts final ArrayList<SensorContactWrapper> contacts = new ArrayList<SensorContactWrapper>(); for (int i = 0; i < 30; i++) { final HiResDate thisDTG = new HiResDate(10000 * i); final WorldLocation thisLocation = new WorldLocation(2 + 0.01 * i, 2 + 0.03 * i, 0); final SensorContactWrapper scw = new SensorContactWrapper(host .getName(), thisDTG, new WorldDistance(4, WorldDistance.MINUTES), 25d, thisLocation, Color.RED, "" + i, 0, sensor.getName()); sensor.add(scw); contacts.add(scw); // also create a host track fix at this DTG final Fix theFix = new Fix(thisDTG, thisLocation, 12d, 3d); final FixWrapper newF = new FixWrapper(theFix); host.add(newF); } // produce the target leg final TrackWrapper target = new TrackWrapper(); target.setName("Tgt Track"); // add a TMA leg final Layers theLayers = new Layers(); theLayers.addThisLayer(host); theLayers.addThisLayer(target); final SensorContactWrapper[] contactArr = contacts.toArray( new SensorContactWrapper[] {}); final RelativeTMASegment newLeg = new RelativeTMASegment(contactArr, new WorldVector(1, 1, 0), new WorldSpeed(12, WorldSpeed.Kts), 12d, theLayers, Color.red); target.add(newLeg); final BaseStackedDotsView view = new BaseStackedDotsView(true, false) { @Override protected boolean allowDisplayOfTargetOverview() { return false; } @Override protected boolean allowDisplayOfZoneChart() { return false; } @Override protected String formatValue(final double value) { return "" + value; } @Override protected ZoneSlicer getOwnshipZoneSlicer(final ColorProvider blueProv) { return null; } @Override protected String getType() { return null; } @Override protected String getUnits() { return null; } @Override protected void makeActions() { // don't make actions, since they rely on Workbench running } @Override protected void updateData(final boolean updateDoublets) { // no, nothing to do. 
} }; // try to set a zone on the track Zone trimmedPeriod = new Zone(150000, 220000, Color.RED); view.setLeg(host, target, trimmedPeriod); // ok, check the leg has changed assertEquals("leg start changed", 150000, target.getStartDTG().getDate() .getTime()); assertEquals("leg start changed", 220000, target.getEndDTG().getDate() .getTime()); // ok, also see if we can create a new leg trimmedPeriod = new Zone(250000, 320000, Color.RED); view.setLeg(host, target, trimmedPeriod); } } public static class TestUpdates extends TestCase { private static class SwitchableTrackProviderImpl implements SwitchableTrackProvider { private final TrackDataProvider _prov; public SwitchableTrackProviderImpl(final TrackDataProvider tracks) { _prov = tracks; } @Override public WatchableList[] getPrimaryTracks() { return new WatchableList[] {_prov.getPrimaryTrack()}; } @Override public WatchableList[] getSecondaryTracks() { return _prov.getSecondaryTracks(); } @Override public boolean isPopulated() { return _prov != null; } } private static class TrackDataHelper implements TrackDataProvider { private final List<TrackWrapper> _primaries = new ArrayList<TrackWrapper>(); private final List<TrackWrapper> _secondaries = new ArrayList<TrackWrapper>(); public void addPrimary(final TrackWrapper ownship) { _primaries.add(ownship); } public void addSecondary(final TrackWrapper tma) { _secondaries.add(tma); } @Override public void addTrackDataListener(final TrackDataListener listener) { // don't bother } @Override public void addTrackShiftListener(final TrackShiftListener listener) { // don't bother } @Override public void fireTracksChanged() { // don't bother } @Override public void fireTrackShift(final WatchableList watchableList) { // don't bother } @Override public WatchableList getPrimaryTrack() { return _primaries.get(0); } @Override public WatchableList[] getSecondaryTracks() { final WatchableList[] res = new WatchableList[_secondaries.size()]; int ctr = 0; final Iterator<TrackWrapper> sIter = 
_secondaries.iterator(); while (sIter.hasNext()) { res[ctr++] = sIter.next(); } return res; } @Override public void removeTrackDataListener(final TrackDataListener listener) { // don't bother } @Override public void removeTrackShiftListener(final TrackShiftListener listener) { // don't bother } } private SensorContactWrapper[] getAllCutsFrom( final SensorWrapper secondSensor) { final SensorContactWrapper[] res = new SensorContactWrapper[secondSensor .size()]; final Enumeration<Editable> sIter = secondSensor.elements(); int ctr = 0; while (sIter.hasMoreElements()) { res[ctr++] = (SensorContactWrapper) sIter.nextElement(); } return res; } public Layers getData() { final Layers layers = new Layers(); final ImportReplay importer = new ImportReplay(); importer.setLayers(layers); try { importer.readLine( "100112 120000 SUBJECT VC 60 06 00.00 N 000 15 00.00 E 320.00 9.00 0.00"); importer.readLine( "100112 120000 SENSOR FA 60 10 48.00 N 000 12 00.00 E 200.00 12.00 0.00"); importer.readLine( ";SENSOR2: 100112 120016 SENSOR @A NULL 162.64 237.36 150.910 NULL \"hull sensor\" SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120015 SENSOR @A NULL 166.15 233.85 150.920 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120020 SUBJECT VC 60 06 02.30 N 000 14 56.13 E 320.00 9.00 0.00 "); importer.readLine( "100112 120020 SENSOR FA 60 10 44.24 N 000 11 57.25 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120020 SENSOR @A NULL 162.39 237.61 150.909 NULL \"hull sensor\" SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120020 SENSOR @A NULL 165.99 234.01 150.919 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120040 SUBJECT VC 60 06 04.60 N 000 14 52.26 E 320.00 9.00 0.00 "); importer.readLine( "100112 120040 SENSOR FA 60 10 40.48 N 000 11 54.50 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120040 SENSOR @A NULL 162.13 237.87 150.908 NULL \"hull sensor\" 
SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120040 SENSOR @A NULL 165.82 234.18 150.919 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120100 SUBJECT VC 60 06 06.89 N 000 14 48.39 E 320.00 9.00 0.00 "); importer.readLine( "100112 120100 SENSOR FA 60 10 36.72 N 000 11 51.75 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120100 SENSOR @A NULL 161.87 238.13 150.907 NULL \"hull sensor\" SUBJECT held on hull sensor"); // miss this tail measurement // importer.readLine( // ";SENSOR2: 100112 120100 SENSOR @A NULL 165.64 234.36 150.918 NULL \"tail sensor\" // SUBJECT held on tail sensor"); importer.readLine( "100112 120120 SUBJECT VC 60 06 09.19 N 000 14 44.53 E 320.00 9.00 0.00 "); importer.readLine( "100112 120120 SENSOR FA 60 10 32.96 N 000 11 49.00 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120120 SENSOR @A NULL 161.59 238.41 150.906 NULL \"hull sensor\" SUBJECT held on hull sensor"); // importer.readLine( // ";SENSOR2: 100112 120120 SENSOR @A NULL 165.46 234.54 150.918 NULL \"tail sensor\" // SUBJECT held on tail sensor"); importer.readLine( "100112 120140 SUBJECT VC 60 06 11.49 N 000 14 40.66 E 320.00 9.00 0.00 "); importer.readLine( "100112 120140 SENSOR FA 60 10 29.21 N 000 11 46.25 E 200.00 12.00 0.00 "); // importer.readLine( // ";SENSOR2: 100112 120140 SENSOR @A NULL 161.29 238.71 150.905 NULL \"hull sensor\" // SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120140 SENSOR @A NULL 165.27 234.73 150.918 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120200 SUBJECT VC 60 06 13.79 N 000 14 36.79 E 320.00 9.00 0.00 "); importer.readLine( "100112 120200 SENSOR FA 60 10 25.45 N 000 11 43.49 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120200 SENSOR @A NULL 160.99 239.01 150.904 NULL \"hull sensor\" SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120200 SENSOR @A NULL 165.08 234.92 150.917 NULL 
\"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120220 SUBJECT VC 60 06 16.09 N 000 14 32.92 E 320.00 9.00 0.00 "); importer.readLine( "100112 120220 SENSOR FA 60 10 21.69 N 000 11 40.74 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120220 SENSOR @A NULL 160.67 239.33 150.902 NULL \"hull sensor\" SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120220 SENSOR @A NULL 164.87 235.13 150.916 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120240 SUBJECT VC 60 06 18.39 N 000 14 29.05 E 320.00 9.00 0.00 "); importer.readLine( "100112 120240 SENSOR FA 60 10 17.93 N 000 11 37.99 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120240 SENSOR @A NULL 160.33 239.67 150.901 NULL \"hull sensor\" SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120240 SENSOR @A NULL 164.66 235.34 150.916 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.readLine( "100112 120300 SUBJECT VC 60 06 20.68 N 000 14 25.18 E 320.00 9.00 0.00 "); importer.readLine( "100112 120300 SENSOR FA 60 10 14.17 N 000 11 35.24 E 200.00 12.00 0.00 "); importer.readLine( ";SENSOR2: 100112 120300 SENSOR @A NULL 159.98 240.02 150.900 NULL \"hull sensor\" SUBJECT held on hull sensor"); importer.readLine( ";SENSOR2: 100112 120300 SENSOR @A NULL 164.44 235.56 150.915 NULL \"tail sensor\" SUBJECT held on tail sensor"); importer.storePendingSensors(); } catch (final IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } return layers; } public TrackDataProvider getTrackData() { final Layers layers = getData(); final TrackWrapper ownship = (TrackWrapper) layers.findLayer("SENSOR"); assertNotNull("found ownship", ownship); final BaseLayer sensors = ownship.getSensors(); assertEquals("has all sensors", 2, sensors.size()); // get the tail final SensorWrapper tailSensor = (SensorWrapper) sensors.find( "tail sensor"); assertNotNull("found tail", tailSensor); final SensorWrapper 
hullSensor = (SensorWrapper) sensors.find( "hull sensor"); assertNotNull("found hull", hullSensor); // give it it's offset tailSensor.setSensorOffset(new ArrayLength(1000)); final SensorContactWrapper[] tailItems = getAllCutsFrom(tailSensor); final SensorContactWrapper[] hullItems = getAllCutsFrom(hullSensor); // note: we've commented out some assertEquals("got all cuts", 8, tailItems.length); assertEquals("got all cuts", 9, hullItems.length); final String newName = "TMA_LEG"; // ok, we also have to generate some target track final TMAfromCuts genny = new TMAfromCuts(tailItems, layers, new WorldVector(Math.PI / 2, 0.02, 0), 45, new WorldSpeed(12, WorldSpeed.Kts), Color.RED) { @Override public String getTrackNameFor(final TrackWrapper newTrack) { return newName; } @Override public boolean isRunning() { return false; } }; // create the new TMA try { genny.execute(null, null); } catch (final Exception e) { fail("exception thrown while running command" + e.getMessage()); e.printStackTrace(); } // get the TMA final TrackWrapper tma = (TrackWrapper) layers.findLayer(newName); assertNotNull("found it", tma); // have a butchers assertEquals("has segments", 1, tma.getSegments().size()); final Collection<Editable> fixes = tma.getUnfilteredItems(new HiResDate( 0), new HiResDate(new Date().getTime())); // note: only 8 fixes in leg, since two sensor cut was hidden assertEquals("has fixes", 8, fixes.size()); final FixWrapper firstFix = (FixWrapper) fixes.toArray(new Editable[] {})[0]; @SuppressWarnings("deprecation") final String toTime = firstFix.getDateTimeGroup().getDate().toGMTString(); assertEquals("valid first time", "12 Jan 2010 12:00:15 GMT", toTime); // and now the track data object final TrackDataHelper prov = new TrackDataHelper(); prov.addPrimary(ownship); prov.addSecondary(tma); return prov; } public void testGetCourseData() { final Layers layers = getData(); final TrackWrapper ownship = (TrackWrapper) layers.findLayer("SENSOR"); assertNotNull("found ownship", 
ownship); // sort out start/end times HiResDate startDTG = DebriefFormatDateTime.parseThis("100112", "120200"); HiResDate endDTG = DebriefFormatDateTime.parseThis("100112", "120240"); TimeSeries courseData = getStandardCourseData(ownship, false, startDTG, endDTG); assertNotNull("found course data", courseData); assertEquals("has items", 3, courseData.getItemCount()); TimeSeriesDataItem firstItem = courseData.getDataItem(0); assertEquals("correct course", 200d, firstItem.getValue()); TimeSeriesDataItem lastItem = courseData.getDataItem(courseData .getItemCount() - 1); assertEquals("correct course", 200d, lastItem.getValue()); // sort out start/end times startDTG = DebriefFormatDateTime.parseThis("100112", "110000"); endDTG = DebriefFormatDateTime.parseThis("100112", "120240"); courseData = getStandardCourseData(ownship, false, startDTG, endDTG); assertNotNull("found course data", courseData); assertEquals("has items", 9, courseData.getItemCount()); firstItem = courseData.getDataItem(0); assertEquals("correct course", 200d, firstItem.getValue()); lastItem = courseData.getDataItem(courseData.getItemCount() - 1); assertEquals("correct course", 200d, lastItem.getValue()); // switch the flip axes value courseData = getStandardCourseData(ownship, true, startDTG, endDTG); assertNotNull("found course data", courseData); assertEquals("has items", 9, courseData.getItemCount()); firstItem = courseData.getDataItem(0); assertEquals("correct course", -160d, firstItem.getValue()); lastItem = courseData.getDataItem(courseData.getItemCount() - 1); assertEquals("correct course", -160d, lastItem.getValue()); } public void testGetMultiPrimaryTrackData() throws FileNotFoundException { // get our sample data-file final ImportReplay importer = new ImportReplay(); final Layers layers = new Layers(); final String fName = "../org.mwc.cmap.combined.feature/root_installs/sample_data/MultiStatics/multistatics_buoyfield.rep"; final File inFile = new File(fName); assertTrue("input file exists", 
inFile.exists()); final FileInputStream is = new FileInputStream(fName); importer.importThis(fName, is, layers); // sort out the sensors importer.storePendingSensors(); // get the sensor tracks final TrackWrapper rx_1 = (TrackWrapper) layers.findLayer("RX_1"); final TrackWrapper rx_2 = (TrackWrapper) layers.findLayer("RX_2"); final SensorWrapper rx_1_sensor = (SensorWrapper) rx_1.getSensors() .first(); final SensorWrapper rx_2_sensor = (SensorWrapper) rx_2.getSensors() .first(); assertNotNull("found sensor 1", rx_1_sensor); assertNotNull("found sensor 2", rx_2_sensor); // ok, we need to move one sensor // get the tail final SensorContactWrapper[] rx1_cuts = getAllCutsFrom(rx_1_sensor); final SensorContactWrapper[] rx2_cuts = getAllCutsFrom(rx_2_sensor); // note: we've commented out some assertEquals("got all cuts", 42, rx1_cuts.length); assertEquals("got all cuts", 66, rx2_cuts.length); final String newName = "TMA_LEG"; // ok, we also have to generate some target track final TMAfromCuts genny = new TMAfromCuts(rx1_cuts, layers, new WorldVector(Math.PI / 2, 0.02, 0), 45, new WorldSpeed(12, WorldSpeed.Kts), Color.RED) { @Override public String getTrackNameFor(final TrackWrapper newTrack) { return newName; } @Override public boolean isRunning() { return false; } }; // create the new TMA try { genny.execute(null, null); } catch (final Exception e) { fail("exception thrown while running command" + e.getMessage()); e.printStackTrace(); } // get the TMA final TrackWrapper tma = (TrackWrapper) layers.findLayer(newName); assertNotNull("found it", tma); // have a butchers assertEquals("has segments", 1, tma.getSegments().size()); final Collection<Editable> fixes = tma.getUnfilteredItems(new HiResDate( 0), new HiResDate(new Date().getTime())); assertEquals("has fixes", 42, fixes.size()); final FixWrapper firstFix = (FixWrapper) fixes.toArray(new Editable[] {})[0]; @SuppressWarnings("deprecation") final String toTime = firstFix.getDateTimeGroup().getDate().toGMTString(); 
assertEquals("valid first time", "12 Dec 2014 12:03:40 GMT", toTime); // and now the track data object final TrackDataHelper prov = new TrackDataHelper(); prov.addPrimary(rx_1); prov.addPrimary(rx_2); prov.addSecondary(tma); // return prov; } public void testUpdateBearings() throws ExecutionException { final StackedDotHelper helper = new StackedDotHelper(); final TimeSeriesCollection dotPlotData = new TimeSeriesCollection(); final TimeSeriesCollection linePlotData = new TimeSeriesCollection(); final TrackDataProvider tracks = getTrackData(); final SwitchableTrackProvider switcher = new SwitchableTrackProviderImpl( tracks); boolean onlyVis = false; final boolean showCourse = true; final boolean flipAxes = false; final ErrorLogger logger = new LoggingService(); final boolean updateDoublets = true; final TimeSeriesCollection targetCourseSeries = new TimeSeriesCollection(); final TimeSeriesCollection targetSpeedSeries = new TimeSeriesCollection(); final TimeSeriesCollection measuredValuesColl = new TimeSeriesCollection(); final TimeSeriesCollection ambigValuesColl = new TimeSeriesCollection(); final TimeSeries ownshipCourseSeries = new TimeSeries("OS Course"); final TimeSeries targetBearingSeries = new TimeSeries("Tgt Bearing"); final TimeSeries targetCalculatedSeries = new TimeSeries("target calc"); final ResidualXYItemRenderer overviewSpeedRenderer = null; final WrappingResidualRenderer overviewCourseRenderer = null; final SetBackgroundShade backShader = new SetBackgroundShade() { @Override public void setShade(final Paint errorColor) { // just ignore it } }; helper.initialise(switcher, true, onlyVis, logger, "Bearings", true, false); helper.updateBearingData(dotPlotData, linePlotData, switcher, onlyVis, showCourse, flipAxes, logger, updateDoublets, targetCourseSeries, targetSpeedSeries, measuredValuesColl, ambigValuesColl, ownshipCourseSeries, targetBearingSeries, targetCalculatedSeries, overviewSpeedRenderer, overviewCourseRenderer, backShader); // have a look at 
what's happened // error plot. the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 4, dotPlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "ERRORStail sensor", dotPlotData .getSeries(0).getKey()); assertEquals("series correct name", "ERRORS_amb_tail sensor", dotPlotData .getSeries(1).getKey()); assertEquals("series correct name", "ERRORShull sensor", dotPlotData .getSeries(2).getKey()); assertEquals("series correct name", "ERRORS_amb_hull sensor", dotPlotData .getSeries(3).getKey()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 8, dotPlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 8, dotPlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(2) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(3) .getItemCount()); // error plot. 
the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 6, linePlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "M_tail sensor", linePlotData .getSeries(0).getKey()); assertEquals("series correct name", "M_hull sensor", linePlotData .getSeries(1).getKey()); assertEquals("series correct name", "M_tail sensor(A)", linePlotData .getSeries(2).getKey()); assertEquals("series correct name", "M_hull sensor(A)", linePlotData .getSeries(3).getKey()); assertEquals("series correct name", "Calculatedtail sensor", linePlotData .getSeries(4).getKey()); assertEquals("series correct name", "Calculatedhull sensor", linePlotData .getSeries(5).getKey()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 8, linePlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 8, linePlotData.getSeries(2) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(3) .getItemCount()); assertEquals("series correct length", 8, linePlotData.getSeries(4) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(5) .getItemCount()); // ok, hide a sensor, and recalculate final TrackWrapper primary = (TrackWrapper) tracks.getPrimaryTrack(); final SensorWrapper firstSensor = (SensorWrapper) primary.getSensors() .elements().nextElement(); firstSensor.setVisible(true); onlyVis = false; helper.updateBearingData(dotPlotData, linePlotData, switcher, onlyVis, showCourse, flipAxes, logger, updateDoublets, targetCourseSeries, targetSpeedSeries, measuredValuesColl, ambigValuesColl, ownshipCourseSeries, targetBearingSeries, targetCalculatedSeries, overviewSpeedRenderer, overviewCourseRenderer, backShader); // have a look at what's happened // 
error plot. the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 4, dotPlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "ERRORStail sensor", dotPlotData .getSeries(0).getKey()); assertEquals("series correct name", "ERRORS_amb_tail sensor", dotPlotData .getSeries(1).getKey()); assertEquals("series correct name", "ERRORShull sensor", dotPlotData .getSeries(2).getKey()); assertEquals("series correct name", "ERRORS_amb_hull sensor", dotPlotData .getSeries(3).getKey()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 8, dotPlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 8, dotPlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(2) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(3) .getItemCount()); // error plot. 
the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 6, linePlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "M_tail sensor", linePlotData .getSeries(0).getKey()); assertEquals("series correct name", "M_hull sensor", linePlotData .getSeries(1).getKey()); assertEquals("series correct name", "M_tail sensor(A)", linePlotData .getSeries(2).getKey()); assertEquals("series correct name", "M_hull sensor(A)", linePlotData .getSeries(3).getKey()); assertEquals("series correct name", "Calculatedtail sensor", linePlotData .getSeries(4).getKey()); assertEquals("series correct name", "Calculatedhull sensor", linePlotData .getSeries(5).getKey()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 8, linePlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 8, linePlotData.getSeries(2) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(3) .getItemCount()); assertEquals("series correct length", 8, linePlotData.getSeries(4) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(5) .getItemCount()); onlyVis = true; helper.updateBearingData(dotPlotData, linePlotData, switcher, onlyVis, showCourse, flipAxes, logger, updateDoublets, targetCourseSeries, targetSpeedSeries, measuredValuesColl, ambigValuesColl, ownshipCourseSeries, targetBearingSeries, targetCalculatedSeries, overviewSpeedRenderer, overviewCourseRenderer, backShader); // have a look at what's happened // error plot. 
the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 2, dotPlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 9, dotPlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(1) .getItemCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "ERRORS", dotPlotData.getSeries(0) .getKey()); assertEquals("series correct name", "ERRORS_amb_", dotPlotData.getSeries( 1).getKey()); // error plot. the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 3, linePlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 9, linePlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(2) .getItemCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "M_", linePlotData.getSeries(0) .getKey()); assertEquals("series correct name", "M_(A)", linePlotData.getSeries(1) .getKey()); assertEquals("series correct name", "Calculated", linePlotData.getSeries( 2).getKey()); // and make the second sensor visible final Enumeration<Editable> sIter = primary.getSensors().elements(); while (sIter.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) sIter.nextElement(); sensor.setVisible(true); } helper.updateBearingData(dotPlotData, linePlotData, switcher, onlyVis, showCourse, flipAxes, logger, updateDoublets, targetCourseSeries, targetSpeedSeries, measuredValuesColl, ambigValuesColl, ownshipCourseSeries, targetBearingSeries, targetCalculatedSeries, overviewSpeedRenderer, 
overviewCourseRenderer, backShader); // error plot. the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 4, dotPlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "ERRORStail sensor", dotPlotData .getSeries(0).getKey()); assertEquals("series correct name", "ERRORS_amb_tail sensor", dotPlotData .getSeries(1).getKey()); assertEquals("series correct name", "ERRORShull sensor", dotPlotData .getSeries(2).getKey()); assertEquals("series correct name", "ERRORS_amb_hull sensor", dotPlotData .getSeries(3).getKey()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 8, dotPlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 8, dotPlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(2) .getItemCount()); assertEquals("series correct length", 9, dotPlotData.getSeries(3) .getItemCount()); // error plot. 
the data is ambiguous, so we've got 4 sets of errors (two sensors, port & stbd) assertEquals("has error data", 6, linePlotData.getSeriesCount()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct name", "M_tail sensor", linePlotData .getSeries(0).getKey()); assertEquals("series correct name", "M_hull sensor", linePlotData .getSeries(1).getKey()); assertEquals("series correct name", "M_tail sensor(A)", linePlotData .getSeries(2).getKey()); assertEquals("series correct name", "M_hull sensor(A)", linePlotData .getSeries(3).getKey()); assertEquals("series correct name", "Calculatedtail sensor", linePlotData .getSeries(4).getKey()); assertEquals("series correct name", "Calculatedhull sensor", linePlotData .getSeries(5).getKey()); // note: even though TMA only has 9 fixes, we get 10 errors since we interpolate assertEquals("series correct length", 8, linePlotData.getSeries(0) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(1) .getItemCount()); assertEquals("series correct length", 8, linePlotData.getSeries(2) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(3) .getItemCount()); assertEquals("series correct length", 8, linePlotData.getSeries(4) .getItemCount()); assertEquals("series correct length", 9, linePlotData.getSeries(5) .getItemCount()); } } public static final String MEASURED_DATASET = "Measured"; public static final String CALCULATED_VALUES = "Calculated"; /** * the maximum number of items we plot as symbols. Above this we just use a line */ private final static int MAX_ITEMS_TO_PLOT = 1000; /** * produce a color shade, according to whether the max error is inside 3 degrees or not. 
*
 * @param errorSeries
 *          the collection of error time-series to scan
 * @param cutOffValue
 *          the error threshold that flips the shade from green to red
 * @return a faint red shade if any error exceeds the cut-off, otherwise a
 *         green shade whose opacity grows the further inside the threshold
 *         the worst error sits
 */
private static Paint calculateErrorShadeFor(
    final TimeSeriesCollection errorSeries, final double cutOffValue)
{
  final Paint col;
  double maxError = 0d;
  final Iterator<?> sIter = errorSeries.getSeries().iterator();
  while (sIter.hasNext())
  {
    final TimeSeries ts = (TimeSeries) sIter.next();
    final List<?> items = ts.getItems();
    for (final Iterator<?> iterator = items.iterator(); iterator.hasNext();)
    {
      final TimeSeriesDataItem item = (TimeSeriesDataItem) iterator.next();
      final boolean useMe;
      // check this isn't infill - infill items are drawn with an unfilled
      // shape, so we skip them when finding the worst error
      if (item instanceof ColouredDataItem)
      {
        final ColouredDataItem cd = (ColouredDataItem) item;
        useMe = cd.isShapeFilled();
      }
      else
      {
        useMe = true;
      }
      if (useMe)
      {
        final double thisE = (Double) item.getValue();
        maxError = Math.max(maxError, Math.abs(thisE));
      }
    }
  }
  if (maxError > cutOffValue)
  {
    // worst error is outside the threshold: faint translucent red
    col = new Color(1f, 0f, 0f, 0.05f);
  }
  else
  {
    // inside the threshold: green, more opaque the smaller the error
    final float shade = (float) (0.03f + (cutOffValue - maxError) * 0.02f);
    col = new Color(0f, 1f, 0f, shade);
  }
  return col;
}

/**
 * remove any of our private interpolation listeners previously registered on
 * the target track (housekeeping before we register fresh ones)
 *
 * @param targetTrack
 *          the track to tidy up (only processed if it is a TrackWrapper)
 */
private static void clearPrivateListeners(final ISecondaryTrack targetTrack)
{
  if (targetTrack instanceof TrackWrapper)
  {
    final TrackWrapper target = (TrackWrapper) targetTrack;
    // ok - we may have registered some interpolation listeners on the track
    // delete them if necessary
    final PropertyChangeListener[] list = target.getPropertyChangeListeners(
        PlainWrapper.LOCATION_CHANGED);
    for (final PropertyChangeListener t : list)
    {
      if (t instanceof PrivatePropertyChangeListener)
      {
        final PrivatePropertyChangeListener prop =
            (PrivatePropertyChangeListener) t;
        prop.detach();
      }
    }
  }
}

/**
 * either produce a list, or build up a list of segments
 *
 * @param secondaryTrack
 *          the track the segment belongs to
 * @param editable
 *          either a SegmentList, or a lone TrackSegment to be wrapped
 * @return the editable itself if it already is a SegmentList, otherwise a
 *         new single-entry SegmentList wrapping the segment
 */
private static SegmentList collateSegments(
    final ISecondaryTrack secondaryTrack, final Editable editable)
{
  final SegmentList segList;
  if (editable instanceof SegmentList)
  {
    segList = (SegmentList) editable;
  }
  else
  {
    segList = new SegmentList();
    // note: we can only set the wrapper
    // if we're looking at a real TMA solution
    if (secondaryTrack instanceof TrackWrapper)
    {
      segList.setWrapper((TrackWrapper) secondaryTrack);
    }
    // ok, add this segment to the list
    segList.addSegment((TrackSegment) editable);
  }
  return segList;
}

/**
 * determine if this time series contains many identical values - this is an indicator for data
 * coming from a simulator, for which turns can't be determined by our peak tracking algorithm.
 *
 * @param dataset
 *          the course data to inspect
 * @param NumMatches
 *          run length of identical consecutive values that counts as "many";
 *          if null, 10% of the series length is used
 * @return true if a run of identical consecutive values that long is found
 */
private static boolean containsIdenticalValues(final TimeSeries dataset,
    final Integer NumMatches)
{
  final int num = dataset.getItemCount();
  final int numMatches;
  if (NumMatches != null)
  {
    numMatches = NumMatches;
  }
  else
  {
    // default: require 10% of the points to be consecutive duplicates
    final double MATCH_PROPORTION = 0.1;
    numMatches = (int) (num * MATCH_PROPORTION);
  }
  double lastCourse = 0d;
  int matchCount = 0;
  for (int ctr = 0; ctr < num; ctr++)
  {
    final TimeSeriesDataItem thisItem = dataset.getDataItem(ctr);
    final double thisCourse = (Double) thisItem.getValue();
    if (thisCourse == lastCourse)
    {
      // ok, count the duplicates
      matchCount++;
      if (matchCount >= numMatches)
      {
        return true;
      }
    }
    else
    {
      matchCount = 0;
    }
    lastCourse = thisCourse;
  }
  return false;
}

/**
 * populate the doublet with a target fix interpolated at the required time.
 * If the time falls exactly on an existing fix, that fix is used directly;
 * otherwise an interpolated fix is created, together with a listener that
 * re-interpolates its location whenever the parent segment moves.
 *
 * @param requiredTime
 *          the time we need a target position for
 * @param doublet
 *          the doublet to store the fix/parent segment in
 * @param segment
 *          the target segment to interpolate within
 */
private static void generateInterpolatedDoublet(final HiResDate requiredTime,
    final TargetDoublet doublet, final TrackSegment segment)
{
  // ok, we'll interpolate the nearest value
  FixWrapper before = null;
  FixWrapper after = null;
  final Enumeration<Editable> fixes = segment.elements();
  while (fixes.hasMoreElements() && after == null)
  {
    final FixWrapper thisF = (FixWrapper) fixes.nextElement();
    final HiResDate thisTime = thisF.getDTG();
    // NOTE(review): if the very first fix is already at/after requiredTime
    // it is still stored as "before" (the null check wins) - presumably
    // acceptable because callers only ask for times inside the segment
    // period; confirm.
    if (before == null || thisTime.lessThan(requiredTime))
    {
      before = thisF;
    }
    else if (thisTime.greaterThanOrEqualTo(requiredTime))
    {
      after = thisF;
    }
  }
  // just check if we're on one of the values
  final FixWrapper toUse;
  if (before != null && before.getDTG().equals(requiredTime))
  {
    toUse = before;
  }
  else if (after != null && after.getDTG().equals(requiredTime))
  {
    toUse = after;
  }
  else
  {
    // ok, we've now boxed the required value
    toUse = FixWrapper.interpolateFix(before, after, requiredTime);
    final FixWrapper beforeF = before;
    final FixWrapper afterF = after;
    // note. the interpolated fix needs to move, if the segments moves
    final PropertyChangeListener newListener =
        new PrivatePropertyChangeListener(segment.getWrapper(),
            PlainWrapper.LOCATION_CHANGED)
        {
          @Override
          public void propertyChange(final PropertyChangeEvent evt)
          {
            final FixWrapper tmpFix = FixWrapper.interpolateFix(beforeF,
                afterF, requiredTime);
            toUse.setLocation(tmpFix.getLocation());
          }
        };
    segment.getWrapper().addPropertyChangeListener(
        PlainWrapper.LOCATION_CHANGED, newListener);
  }
  doublet.targetFix = toUse;
  doublet.targetParent = segment;
}

/**
 * sort out data of interest: build the set of sensor-cut / target-fix
 * doublets for the supplied primary tracks against the target track
 *
 * @param primaries
 *          the primary (sensor-holding) tracks
 * @param targetTrack
 *          the target (secondary) track, may be null
 * @param onlyVis
 *          only consider visible sensors/cuts
 * @param needBearing
 *          skip cuts without bearing data
 * @param needFrequency
 *          skip cuts without frequency data
 */
public static TreeSet<Doublet> getDoublets(final List<TrackWrapper> primaries,
    final ISecondaryTrack targetTrack, final boolean onlyVis,
    final boolean needBearing, final boolean needFrequency)
{
  final TreeSet<Doublet> res = new TreeSet<Doublet>();

  // note - we have to inject some listeners, so that
  // interpolated fixes know when their parent has updated.
  // each time we come in here, we delete existing ones,
  // as housekeeping.
clearPrivateListeners(targetTrack); final Vector<TrackSegment> theSegments; if (targetTrack != null) { theSegments = getTargetLegs(targetTrack); } else { theSegments = null; } for (final TrackWrapper sensorHost : primaries) { // loop through our sensor data final Enumeration<Editable> sensors = sensorHost.getSensors().elements(); while (sensors.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) sensors.nextElement(); storeDoubletsFor(sensor, res, onlyVis, needBearing, needFrequency, theSegments, sensorHost, targetTrack); } // loop through sensors } // loop through primaries return res; } private static TimeSeries getSinglePointCourseData( final TrackWrapper primaryTrack, final ISecondaryTrack secondaryTrack, final boolean flipAxes) { final TimeSeries osCourseValues = new TimeSeries(primaryTrack.getName()); // get the single location final FixWrapper loc = (FixWrapper) primaryTrack.getPositionIterator() .nextElement(); final Enumeration<Editable> segments = secondaryTrack.segments(); while (segments.hasMoreElements()) { final Editable nextE = segments.nextElement(); // produce a list of segments even if there's actually only one final SegmentList segList = collateSegments(secondaryTrack, nextE); final Enumeration<Editable> segIter = segList.elements(); while (segIter.hasMoreElements()) { final TrackSegment segment = (TrackSegment) segIter.nextElement(); final Enumeration<Editable> enumer = segment.elements(); while (enumer.hasMoreElements()) { final FixWrapper thisTgtFix = (FixWrapper) enumer.nextElement(); double ownshipCourse = MWC.Algorithms.Conversions.Rads2Degs(loc .getCourse()); // stop, stop, stop - do we wish to plot bearings in the +/- 180 domain? 
if (flipAxes && ownshipCourse > 180) { ownshipCourse -= 360; } final FixedMillisecond thisMilli = new FixedMillisecond(thisTgtFix .getDateTimeGroup().getDate().getTime()); final ColouredDataItem crseBearing = new ColouredDataItem(thisMilli, ownshipCourse, loc.getColor(), true, null, true, true); osCourseValues.add(crseBearing); } } } return osCourseValues; } private static TimeSeries getStandardCourseData( final TrackWrapper primaryTrack, final boolean flipAxes, final HiResDate startDTG, final HiResDate endDTG) { final TimeSeries osCourseValues = new TimeSeries(primaryTrack.getName()); // loop through using the iterator final Enumeration<Editable> pIter = primaryTrack.getPositionIterator(); final TimePeriod validPeriod = new TimePeriod.BaseTimePeriod(startDTG, endDTG); final List<Editable> validItems = new LinkedList<Editable>(); while (pIter.hasMoreElements()) { final FixWrapper fw = (FixWrapper) pIter.nextElement(); if (validPeriod.contains(fw.getDateTimeGroup())) { validItems.add(fw); } else { // have we passed the end of the requested period? if (fw.getDateTimeGroup().greaterThan(endDTG)) { // ok, drop out break; } } } // ok, now go through the list final Iterator<Editable> vIter = validItems.iterator(); final int freq = Math.max(1, validItems.size() / MAX_ITEMS_TO_PLOT); int ctr = 0; while (vIter.hasNext()) { final Editable ed = vIter.next(); if (ctr++ % freq == 0) { final FixWrapper fw = (FixWrapper) ed; final FixedMillisecond thisMilli = new FixedMillisecond(fw .getDateTimeGroup().getDate().getTime()); double ownshipCourse = MWC.Algorithms.Conversions.Rads2Degs(fw .getCourse()); // stop, stop, stop - do we wish to plot bearings in the +/- 180 domain? 
if (flipAxes && ownshipCourse > 180) { ownshipCourse -= 360; } final ColouredDataItem crseBearing = new ColouredDataItem(thisMilli, ownshipCourse, fw.getColor(), true, null, true, true); osCourseValues.add(crseBearing); } } return osCourseValues; } /** * * @param workingFix * pre-existing fix object, to stop us repeatedly creating it * @param theSegments * the segment within this track * @param requiredTime * the time we need data for * @param interpFix * whether to only accept a target fix within 1 second of the target time, or to * interpolate the nearest one * @param allowInfill * whether we generate a doublet for dynamic infill segments * @return a Doublet containing the relevant data */ private static TargetDoublet getTargetDoublet(final FixWrapper workingFix, final Vector<TrackSegment> theSegments, final HiResDate requiredTime, final boolean interpFix, final boolean allowInfill) { final TargetDoublet doublet = new TargetDoublet(); if (theSegments != null && !theSegments.isEmpty()) { final Iterator<TrackSegment> iter = theSegments.iterator(); while (iter.hasNext()) { final TrackSegment ts = iter.next(); if (ts.endDTG() == null || ts.startDTG() == null) { // ok, move onto the next segment CorePlugin.logError(IStatus.WARNING, "Warning, segment is missing data:" + ts, null); continue; } final TimePeriod validPeriod = new TimePeriod.BaseTimePeriod(ts .startDTG(), ts.endDTG()); if (validPeriod.contains(requiredTime)) { // if this is an infill, then we're relaxed about the errors if (ts instanceof DynamicInfillSegment) { // aaah, but are we interested in infill segments? 
if (allowInfill) { handleDynamicInfill(workingFix, requiredTime, doublet, ts); } } else { // see if we're allowing an interpolated fix if (interpFix) { generateInterpolatedDoublet(requiredTime, doublet, ts); break; } else { // ok, check we have a TMA fix almost exactly at this time final Enumeration<Editable> fixes = ts.elements(); while (fixes.hasMoreElements()) { final FixWrapper thisF = (FixWrapper) fixes.nextElement(); // note: workaround. When we've merged the track, // the new legs are actually one millisecond later. // workaround this. final long timeDiffMicros = Math.abs(thisF.getDTG().getMicros() - requiredTime.getMicros()); if (timeDiffMicros <= 1000) { // sorted. here we go doublet.targetParent = ts; doublet.targetFix = thisF; // ok, done. break; } } } } } } } return doublet; } private static Vector<TrackSegment> getTargetLegs( final ISecondaryTrack targetTrack) { final Vector<TrackSegment> _theSegments = new Vector<TrackSegment>(); final Enumeration<Editable> trkData = targetTrack.segments(); while (trkData.hasMoreElements()) { final Editable thisI = trkData.nextElement(); if (thisI instanceof SegmentList) { final SegmentList thisList = (SegmentList) thisI; final Enumeration<Editable> theElements = thisList.elements(); while (theElements.hasMoreElements()) { final TrackSegment ts = (TrackSegment) theElements.nextElement(); if (ts.getVisible()) { _theSegments.add(ts); } } } else if (thisI instanceof TrackSegment) { final TrackSegment ts = (TrackSegment) thisI; _theSegments.add(ts); } } return _theSegments; } private static Color halfWayColor(final Color a, final Color b) { final int red = (a.getRed() + b.getRed()) / 2; final int blue = (a.getBlue() + b.getBlue()) / 2; final int green = (a.getGreen() + b.getGreen()) / 2; return new Color(red, blue, green); } private static void handleDynamicInfill(final FixWrapper workingFix, final HiResDate requiredTime, final TargetDoublet doublet, final TrackSegment segment) { // sorted. 
here we go doublet.targetParent = segment; // create an object with the right time workingFix.getFix().setTime(requiredTime); // and find any matching items final SortedSet<Editable> items = segment.tailSet(workingFix); if (!items.isEmpty()) { doublet.targetFix = (FixWrapper) items.first(); } } /** * is this a multi-sensor dataset? * * @param doublets * @return */ private final static boolean isMultiSensor(final TreeSet<Doublet> doublets) { final Iterator<Doublet> iter = doublets.iterator(); SensorWrapper lastS = null; TrackWrapper lastT = null; while (iter.hasNext()) { final Doublet next = iter.next(); final SensorWrapper thisS = next.getSensorCut().getSensor(); final TrackWrapper thisT = thisS.getHost(); if (lastS == null) { lastS = thisS; } else if (!lastS.equals(thisS)) { return true; } if (lastT == null) { lastT = thisT; } else if (!lastT.equals(thisT)) { return true; } } return false; } /** * utility method to add a value to a series, calculating the series if necessary * * @param collection * parent collection * @param seriesName * name of series of operate on * @param bFreq * data item to add */ static private void safelyAddItem(final TimeSeriesCollection collection, final String seriesName, final TimeSeriesDataItem bFreq) { TimeSeries series = collection.getSeries(seriesName); if (series == null) { series = new TimeSeries(seriesName); collection.addSeries(series); } series.add(bFreq); } public static ArrayList<Zone> sliceOwnship(final TimeSeries osCourse, final ZoneChart.ColorProvider colorProvider) { // make a decision on which ownship slicer to use final IOwnshipLegDetector detector; if (containsIdenticalValues(osCourse, null)) { detector = new ArtificalLegDetector(); } else { detector = new PeakTrackingOwnshipLegDetector(); } final int num = osCourse.getItemCount(); final long[] times = new long[num]; final double[] speeds = new double[num]; final double[] courses = new double[num]; for (int ctr = 0; ctr < num; ctr++) { final TimeSeriesDataItem thisItem 
= osCourse.getDataItem(ctr); final FixedMillisecond thisM = (FixedMillisecond) thisItem.getPeriod(); times[ctr] = thisM.getMiddleMillisecond(); speeds[ctr] = 0; courses[ctr] = (Double) thisItem.getValue(); } final List<LegOfData> legs = detector.identifyOwnshipLegs(times, speeds, courses, 5, Precision.LOW); final ArrayList<Zone> res = new ArrayList<Zone>(); for (final LegOfData leg : legs) { final Zone newZone = new Zone(leg.getStart(), leg.getEnd(), colorProvider .getZoneColor()); res.add(newZone); } return res; } private static ColouredDataItem storeAmbiguousCut(final double ambigBearing, final boolean flipAxes, final boolean bearingToPort, final Color thisColor, final Doublet thisD, final Color grayShade, final RegularTimePeriod thisMilli, final boolean parentIsNotDynamic) { double theBearing = ambigBearing; // put the ambig baering into the correct domain while (theBearing < 0) { theBearing += 360; } if (flipAxes && theBearing > 180) { theBearing -= 360; } // make the color darker, if we're on the stbd bearnig final Color ambigColor; if (bearingToPort) { ambigColor = thisColor.darker(); } else { ambigColor = thisColor; } // if this cut has been resolved, we don't show a symbol // for the ambiguous cut final boolean showSymbol = true; final Color color = thisD.getHasBeenResolved() ? 
grayShade : ambigColor;
    // wrap the (possibly domain-shifted) ambiguous bearing as a chart item, keyed on the sensor cut
    final ColouredDataItem amBearing = new ColouredDataItem(thisMilli, theBearing,
        color, false, null, showSymbol, parentIsNotDynamic, thisD.getSensorCut());
    return amBearing;
  }

  /**
   * collect the doublets contributed by a single sensor, applying the visibility and
   * bearing/frequency filters to each cut before it is considered.
   *
   * @param sensor the sensor whose cuts are to be processed
   * @param res the (sorted) set that accepted doublets are added to
   * @param onlyVis if true, skip invisible sensors and invisible cuts
   * @param needBearing if true, only accept cuts that carry bearing data
   * @param needFrequency if true, only accept cuts that carry frequency data
   * @param theSegments the candidate target track segments
   * @param sensorHost the track that hosts the sensor (used for ownship fixes)
   * @param targetTrack the secondary (target) track, may be null
   */
  private static void storeDoubletsFor(final SensorWrapper sensor,
      final TreeSet<Doublet> res, final boolean onlyVis,
      final boolean needBearing, final boolean needFrequency,
      final Vector<TrackSegment> theSegments, final WatchableList sensorHost,
      final ISecondaryTrack targetTrack)
  {
    if (!onlyVis || (onlyVis && sensor.getVisible()))
    {
      // friendly fix-wrapper to save us repeatedly creating it
      final FixWrapper index = new FixWrapper(new Fix(null, new WorldLocation(0,
          0, 0), 0.0, 0.0));

      final Enumeration<Editable> cuts = sensor.elements();
      while (cuts.hasMoreElements())
      {
        final SensorContactWrapper scw = (SensorContactWrapper) cuts
            .nextElement();
        if (!onlyVis || (onlyVis && scw.getVisible()))
        {
          // is this cut suitable for what we're looking for?
          if (needBearing)
          {
            if (!scw.getHasBearing())
            {
              continue;
            }
          }

          // aaah, but does it meet the frequency requirement?
          if (needFrequency)
          {
            if (!scw.getHasFrequency())
            {
              continue;
            }
          }

          storeDoubletsForThisCut(scw, res, needFrequency, index, theSegments,
              sensorHost, targetTrack);
        } // if cut is visible
      } // loop through cuts
    } // if sensor is visible
  }

  /**
   * try to build a {@link Doublet} for one sensor cut: pair the cut with an interpolated target
   * fix (when one can be produced for the cut's DTG) and the nearest ownship fix. Cuts with no
   * target data but a valid ownship fix are still stored, as ownship-only doublets.
   *
   * @param scw the sensor cut being processed
   * @param res destination set for any doublet produced
   * @param needFrequency true when collating frequency data (suppresses infill segments)
   * @param index scratch fix-wrapper, reused to avoid repeated allocation
   * @param theSegments the candidate target track segments
   * @param sensorHost the track hosting the sensor
   * @param targetTrack the secondary track, may be null
   */
  private static void storeDoubletsForThisCut(final SensorContactWrapper scw,
      final TreeSet<Doublet> res, final boolean needFrequency,
      final FixWrapper index, final Vector<TrackSegment> theSegments,
      final WatchableList sensorHost, final ISecondaryTrack targetTrack)
  {
    /**
     * Since the contact is travelling in a straight, on steady speed when on a leg, it's perfectly
     * OK to interpolate a target position for any sensor time.
     */
    final boolean interpFix = true;// needFrequency;

    /**
     * for frequency data we don't generate a double for dynamic infills, since we have low
     * confidence in the target course/speed
     */
    final boolean allowInfill = !needFrequency;

    final TargetDoublet doublet = getTargetDoublet(index, theSegments, scw
        .getDTG(), interpFix, allowInfill);

    // find the ownship fix nearest to this cut's time (expect exactly one match)
    final FixWrapper hostFix;
    final Watchable[] matches = sensorHost.getNearestTo(scw.getDTG());
    if (matches != null && matches.length == 1)
    {
      hostFix = (FixWrapper) matches[0];
    }
    else
    {
      hostFix = null;
    }

    if (doublet.targetFix != null && hostFix != null)
    {
      final Doublet thisDub = new Doublet(scw, doublet.targetFix,
          doublet.targetParent, hostFix);

      // if we've no target track add all the points
      if (targetTrack == null)
      {
        // store our data
        res.add(thisDub);
      }
      else
      {
        // if we've got a target track we only add points
        // for which we
        // have
        // a target location
        if (doublet.targetFix != null)
        {
          // store our data
          res.add(thisDub);
        }
      } // if we know the track
    } // if we find a match
    else if (hostFix != null && (doublet.targetFix == null
        || targetTrack == null))
    {
      // no target data, just use ownship sensor data
      final Doublet thisDub = new Doublet(scw, null, null, hostFix);
      res.add(thisDub);
    }
  }

  /**
   * add the measured bearing for one doublet to the measured-values collection, shifted into the
   * positive (0..360) domain, or into +/-180 when the axes are flipped. In multi-sensor mode each
   * sensor gets its own series (named with the sensor name appended).
   *
   * @param multiSensor whether data comes from more than one sensor
   * @param sensor the sensor that produced this cut
   * @param measuredBearing the raw measured bearing (degrees)
   * @param flipAxes true to plot in the +/-180 domain rather than 0..360
   * @param thisMilli time stamp of the cut
   * @param bearingColor colour for the data item (already darkened for starboard cuts)
   * @param parentIsNotDynamic true when the target segment isn't a dynamic infill
   * @param thisD the doublet this bearing belongs to
   * @param measuredValuesColl destination collection
   */
  private static void storeMeasuredBearing(final boolean multiSensor,
      final SensorWrapper sensor, final double measuredBearing,
      final boolean flipAxes, final RegularTimePeriod thisMilli,
      final Color bearingColor, final boolean parentIsNotDynamic,
      final Doublet thisD, final TimeSeriesCollection measuredValuesColl)
  {
    final String seriesName = multiSensor
        ? BaseStackedDotsView.MEASURED_VALUES + sensor.getName()
        : BaseStackedDotsView.MEASURED_VALUES;

    double theBearing = measuredBearing;

    // put the measured bearing back in the positive domain
    if (theBearing < 0)
    {
      theBearing += 360d;
    }

    // stop, stop, stop - do we wish to plot bearings in the +/- 180 domain?
    if (flipAxes && theBearing > 180)
    {
      theBearing -= 360;
    }

    final ColouredDataItem mBearing = new ColouredDataItem(thisMilli,
        theBearing, bearingColor, false, null, true, parentIsNotDynamic, thisD
            .getSensorCut());

    safelyAddItem(measuredValuesColl, seriesName, mBearing);
  }

  /**
   * walk the secondary track's segments and append course and speed points (within the supplied
   * time window) to the two supplied series. Successive course values are kept in the same
   * rotational domain as their predecessor, to avoid 359-to-0 jumps in the plot.
   *
   * @param _secondaryTrack the target track providing the segments
   * @param startDTG start of the period of interest
   * @param endDTG end of the period of interest
   * @param flipAxes true to trim courses into the +/-180 domain
   * @param tgtCourseValues destination series for course points
   * @param tgtSpeedValues destination series for speed points
   */
  private static void storeTargetCourseSpeedData(
      final ISecondaryTrack _secondaryTrack, final HiResDate startDTG,
      final HiResDate endDTG, final boolean flipAxes,
      final TimeSeries tgtCourseValues, final TimeSeries tgtSpeedValues)
  {
    // sort out the target course/speed
    final Enumeration<Editable> segments = _secondaryTrack.segments();
    final TimePeriod period = new TimePeriod.BaseTimePeriod(startDTG, endDTG);
    while (segments.hasMoreElements())
    {
      final Editable nextE = segments.nextElement();
      // if there's just one segment - then we need to wrap it, else return
      // the list of segments
      final SegmentList segList = collateSegments(_secondaryTrack, nextE);

      final Enumeration<Editable> segIter = segList.elements();
      while (segIter.hasMoreElements())
      {
        final TrackSegment segment = (TrackSegment) segIter.nextElement();

        // is this an infill segment
        final boolean isInfill = segment instanceof DynamicInfillSegment;

        // check it has values, and is in range
        if (segment.isEmpty() || segment.startDTG().greaterThan(endDTG)
            || segment.endDTG().lessThan(startDTG))
        {
          // ok, we can skip this one
        }
        else
        {
          final Enumeration<Editable> points = segment.elements();
          Double lastCourse = null;
          while (points.hasMoreElements())
          {
            final FixWrapper fw = (FixWrapper) points.nextElement();
            if (period.contains(fw.getDateTimeGroup()))
            {
              // ok, create a point for it
              final FixedMillisecond thisMilli = new FixedMillisecond(fw
                  .getDateTimeGroup().getDate().getTime());
              double tgtCourse = MWC.Algorithms.Conversions.Rads2Degs(fw
                  .getCourse());
              final double tgtSpeed = fw.getSpeed();

              // see if we need to change the domain of the course to match
              // the previous value
              // NOTE(review): thresholds are asymmetric (+190 / -180) - presumed
              // deliberate hysteresis, but worth confirming
              if (lastCourse != null)
              {
                if (tgtCourse - lastCourse > 190)
                {
                  tgtCourse = tgtCourse - 360;
                }
                else if (tgtCourse - lastCourse < -180)
                {
                  tgtCourse = 360 + tgtCourse;
                }
              }
              lastCourse = tgtCourse;

              // trim to +/- domain if we're flipping axes
              if (flipAxes && tgtCourse > 180)
              {
                tgtCourse -= 360;
              }

              // we use the raw color for infills, to help find which
              // infill we're referring to (esp in random infills)
              final Color courseColor;
              final Color speedColor;
              if (isInfill)
              {
                courseColor = fw.getColor();
                speedColor = fw.getColor();
              }
              else
              {
                courseColor = fw.getColor().brighter();
                speedColor = fw.getColor().darker();
              }

              tgtCourseValues.add(new ColouredDataItem(thisMilli, tgtCourse,
                  courseColor, isInfill, null, true, true));
              tgtSpeedValues.add(new ColouredDataItem(thisMilli, tgtSpeed,
                  speedColor, isInfill, null, true, true));
            }
          }
        }
      }
    }
  }

  /**
   * the track being dragged
   */
  private TrackWrapper _primaryTrack;

  /**
   * introduce support for multiple primary tracks
   */
  private final List<TrackWrapper> _primaryTracks =
      new ArrayList<TrackWrapper>();

  /**
   * the secondary track we're monitoring
   */
  private ISecondaryTrack _secondaryTrack;

  /**
   * the set of points to watch on the primary track. This is stored as a sorted set because if we
   * have multiple sensors they may be supplied in chronological order, or they may represent
   * overlapping time periods
   */
  private TreeSet<Doublet> _primaryDoublets;

  /**
   * collate every sensor cut (one series per sensor name) across all primary tracks, with
   * bearings shifted into the requested domain.
   *
   * @param onlyVis only include visible sensors/cuts
   * @param flipAxes true to plot bearings in the +/-180 domain
   * @param sensorPeriod time window to restrict cuts to (null for no restriction)
   * @return collection of one bearing series per sensor
   */
  private TimeSeriesCollection getAllSensorCuts(final boolean onlyVis,
      final boolean flipAxes, final TimePeriod sensorPeriod)
  {
    final TimeSeriesCollection allCutsColl = new TimeSeriesCollection();
    for (final TrackWrapper primaryTrack : getPrimaryTracks())
    {
      final List<SensorContactWrapper> theBearings = getBearings(primaryTrack,
          onlyVis, sensorPeriod);
      for (final SensorContactWrapper cut : theBearings)
      {
        double theBearing;
        final String sensorName = cut.getSensorName();

        // ensure it's in the positive domain
        if (cut.getBearing() < 0)
        {
          theBearing = cut.getBearing() + 360;
        }
        else
        {
          theBearing = cut.getBearing();
        }

        // put in the correct domain, if necessary
        if (flipAxes)
        {
          if (theBearing > 180d)
          {
            theBearing -= 360d;
          }
        }
        else
        {
          if (theBearing < 0)
          {
            theBearing += 360;
          }
        }
        final TimeSeriesDataItem item = new TimeSeriesDataItem(
            new FixedMillisecond(cut.getDTG().getDate().getTime()), theBearing);
        safelyAddItem(allCutsColl, sensorName, item);
      }
    }
    return allCutsColl;
  }

  /**
   * retrieve the sensor cuts on this track, optionally restricted to visible items and to a
   * target time period.
   *
   * @param primaryTrack the track whose sensors are inspected
   * @param onlyVis only include visible sensors/cuts
   * @param targetPeriod time window of interest (null for no restriction)
   * @return the matching cuts, in sensor/cut iteration order
   */
  public List<SensorContactWrapper> getBearings(final TrackWrapper primaryTrack,
      final boolean onlyVis, final TimePeriod targetPeriod)
  {
    final List<SensorContactWrapper> res =
        new ArrayList<SensorContactWrapper>();

    // loop through our sensor data
    final Enumeration<Editable> sensors = primaryTrack.getSensors().elements();
    if (sensors != null)
    {
      while (sensors.hasMoreElements())
      {
        final SensorWrapper wrapper = (SensorWrapper) sensors.nextElement();
        if (!onlyVis || (onlyVis && wrapper.getVisible()))
        {
          final Enumeration<Editable> cuts = wrapper.elements();
          while (cuts.hasMoreElements())
          {
            final SensorContactWrapper scw = (SensorContactWrapper) cuts
                .nextElement();
            if (!onlyVis || (onlyVis && scw.getVisible()))
            {
              if (targetPeriod == null || targetPeriod.contains(scw.getDTG()))
              {
                res.add(scw);
              } // if we find a match
            } // if cut is visible
          } // loop through cuts
        } // if sensor is visible
      } // loop through sensors
    } // if there are sensors

    return res;
  }

  /**
   * produce the doublets for the currently-assigned primary/secondary tracks.
   *
   * @param onlyVis only use visible sensors/cuts
   * @param needBearing only accept cuts that carry bearing data
   * @param needFrequency only accept cuts that carry frequency data
   * @return the sorted set of doublets
   */
  public TreeSet<Doublet> getDoublets(final boolean onlyVis,
      final boolean needBearing, final boolean needFrequency)
  {
    return getDoublets(_primaryTracks, _secondaryTrack, onlyVis, needBearing,
        needFrequency);
  }

  /** the current (last-assigned) primary track, null when not initialised */
  public TrackWrapper getPrimaryTrack()
  {
    return _primaryTrack;
  }

  /** all primary tracks (multi-primary support) - note: live list, not a copy */
  public List<TrackWrapper> getPrimaryTracks()
  {
    return _primaryTracks;
  }

  /** the secondary (target) track, null when not initialised */
  public ISecondaryTrack getSecondaryTrack()
  {
    return _secondaryTrack;
  }

  /**
   * initialise the data, check we've got sensor data &amp; the correct number of visible tracks.
   * On any failure an INFO message is logged and the helper is left un-initialised; on success
   * the doublets are (re)calculated.
   *
   * @param provider source of the primary/secondary track assignments
   * @param showError whether to report errors (currently unused in this body)
   * @param onlyVis only use visible sensors/cuts
   * @param logger where to report progress/errors
   * @param dataType label for the data being collated (e.g. "Bearing")
   * @param needBrg only accept cuts with bearing data
   * @param needFreq only accept cuts with frequency data
   */
  public void initialise(final SwitchableTrackProvider provider,
      final boolean showError, final boolean onlyVis, final ErrorLogger logger,
      final String dataType, final boolean needBrg, final boolean needFreq)
  {
    // reset any previous state before we start
    _secondaryTrack = null;
    _primaryTrack = null;
    _primaryTracks.clear();

    // do we have some data?
    if (provider == null)
    {
      // output error message
      logger.logError(IStatus.INFO, "Please open a Debrief plot", null);
      return;
    }

    if (!provider.isPopulated() || provider.getPrimaryTracks() == null
        || provider.getPrimaryTracks().length == 0)
    {
      logger.logError(IStatus.INFO,
          "A primary track must be placed on the Tote", null);
      return;
    }
    else
    {
      final WatchableList[] primaryTracks = provider.getPrimaryTracks();
      for (final WatchableList priTrk : primaryTracks)
      {
        if (priTrk instanceof TrackWrapper)
        {
          // note: _primaryTrack ends up as the LAST TrackWrapper in the array
          _primaryTrack = (TrackWrapper) priTrk;
          _primaryTracks.add((TrackWrapper) priTrk);
        }
        else
        {
          logger.logError(IStatus.INFO,
              "The primary track must be a vehicle track", null);
          return;
        }
      }
    }

    // now the sec track
    final WatchableList[] secs = provider.getSecondaryTracks();

    // any?
    if ((secs == null) || (secs.length == 0))
    {
      logger.logError(IStatus.INFO, "No secondary track assigned", null);
      return;
    }
    else
    {
      // too many?
      if (secs.length > 1)
      {
        logger.logError(IStatus.INFO,
            "Only 1 secondary track may be on the tote", null);
        return;
      }

      // correct sort?
      final WatchableList secTrk = secs[0];
      if (!(secTrk instanceof ISecondaryTrack))
      {
        logger.logError(IStatus.INFO,
            "The secondary track must be a vehicle track", null);
        return;
      }
      else
      {
        _secondaryTrack = (ISecondaryTrack) secTrk;
      }
    }

    // must have worked, hooray
    logger.logError(IStatus.OK, null, null);

    // ok, get the positions
    updateDoublets(onlyVis, needBrg, needFreq);
  }

  /**
   * clear our data, all is finished
   */
  public void reset()
  {
    if (_primaryDoublets != null)
    {
      _primaryDoublets.clear();
    }
    _primaryDoublets = null;
    _primaryTrack = null;
    _secondaryTrack = null;
  }

  /**
   * ok, our track has been dragged, calculate the new series of offsets. Rebuilds the bearing
   * residuals (dot plot), the absolute bearing/calculated series (line plot), and the
   * target/ownship course and speed series. Series notification is suspended for the duration of
   * the rebuild and restored in a finally block.
   *
   * @param linePlot
   * @param dotPlot
   * @param onlyVis
   * @param showCourse
   * @param b
   * @param holder
   * @param logger
   * @param targetCourseSeries
   * @param targetSpeedSeries
   * @param ownshipCourseSeries
   * @param targetBearingSeries
   * @param overviewSpeedRenderer
   * @param _overviewCourseRenderer
   *
   * @param currentOffset
   *          how far the current track has been dragged
   */
  public void updateBearingData(final TimeSeriesCollection dotPlotData,
      final TimeSeriesCollection linePlotData,
      final SwitchableTrackProvider tracks, final boolean onlyVis,
      final boolean showCourse, final boolean flipAxes,
      final ErrorLogger logger, final boolean updateDoublets,
      final TimeSeriesCollection targetCourseSeries,
      final TimeSeriesCollection targetSpeedSeries,
      final TimeSeriesCollection measuredValuesColl,
      final TimeSeriesCollection ambigValuesColl,
      final TimeSeries ownshipCourseSeries,
      final TimeSeries targetBearingSeries,
      final TimeSeries targetCalculatedSeries,
      final ResidualXYItemRenderer overviewSpeedRenderer,
      final WrappingResidualRenderer overviewCourseRenderer,
      final SetBackgroundShade backShader)
  {
    // do we even have a primary track
    if (_primaryTrack == null)
    {
      // ok, clear the data
      linePlotData.removeAllSeries();
      dotPlotData.removeAllSeries();
      targetCourseSeries.removeAllSeries();
      targetSpeedSeries.removeAllSeries();
      return;
    }

    // ok, find the track wrappers
    if (_secondaryTrack == null)
    {
      initialise(tracks, false, onlyVis, logger, "Bearing", true, false);
    }

    // did it work?
    // if (_secondaryTrack == null)
    // return;

    // ok - the tracks have moved. better update the doublets
    if (updateDoublets)
    {
      updateDoublets(onlyVis, true, false);
    }

    // aah - but what if we've ditched our doublets?
    if ((_primaryDoublets == null) || (_primaryDoublets.size() == 0))
    {
      // better clear the plot
      dotPlotData.removeAllSeries();
      linePlotData.removeAllSeries();
      return;
    }

    // check if we've got multi sensor
    final boolean multiSensor = isMultiSensor(_primaryDoublets);

    // create the collection of series
    final TimeSeriesCollection calculatedSeries = new TimeSeriesCollection();
    final TimeSeriesCollection ownshipCourseColl = new TimeSeriesCollection();

    // the previous steps occupy some time.
    // just check we haven't lost the primary track while they were running
    if (_primaryTrack == null)
    {
      return;
    }

    // produce a dataset for each track
    final TimeSeries tgtCourseValues = new TimeSeries("Tgt Course");
    final TimeSeries tgtSpeedValues = new TimeSeries("Tgt Speed");

    // create a list of series, so we can pause their updates
    final List<TimeSeries> sList = new Vector<TimeSeries>();
    sList.add(tgtCourseValues);
    sList.add(tgtSpeedValues);
    sList.add(targetCalculatedSeries);
    sList.add(targetBearingSeries);
    sList.add(ownshipCourseSeries);

    final List<TimeSeriesCollection> tList = new Vector<TimeSeriesCollection>();
    tList.add(measuredValuesColl);
    tList.add(ownshipCourseColl);
    tList.add(targetCourseSeries);
    tList.add(targetSpeedSeries);
    tList.add(dotPlotData);
    tList.add(linePlotData);
    tList.add(calculatedSeries);
    tList.add(ambigValuesColl);

    // ok, wrap the switching on/off of notify in try/catch,
    // to be sure to switch notify back on at end
    try
    {
      // now switch off updates
      for (final TimeSeriesCollection series : tList)
      {
        series.setNotify(false);
        series.removeAllSeries();
      }
      for (final TimeSeries series : sList)
      {
        series.setNotify(false);
        // and clear the list
        series.clear();
      }

      // create the color for resolved ambig data
      final Color grayShade = new Color(155, 155, 155, 50);

      // ok, run through the points on the primary track
      final Iterator<Doublet> iter = _primaryDoublets.iterator();
      while (iter.hasNext())
      {
        final Doublet thisD = iter.next();
        final boolean parentIsNotDynamic = thisD.getTargetTrack() == null
            || !(thisD.getTargetTrack() instanceof DynamicInfillSegment);
        try
        {
          // obvious stuff first (stuff that doesn't need the tgt data)
          final Color thisColor = thisD.getColor();
          final double measuredBearing = thisD.getMeasuredBearing();
          double ambigBearing = thisD.getAmbiguousMeasuredBearing();
          final HiResDate currentTime = thisD.getDTG();
          final FixedMillisecond thisMilli = new FixedMillisecond(currentTime
              .getDate().getTime());
          final boolean hasAmbiguous = !Double.isNaN(ambigBearing);

          // ok, we need to make the color darker if it's starboard
          final boolean bearingToPort = thisD.getSensorCut().isBearingToPort();

          // make the color darker, if it's to stbd
          final Color bearingColor;
          if (bearingToPort)
          {
            bearingColor = thisColor;
          }
          else
          {
            bearingColor = thisColor.darker();
          }

          final SensorWrapper sensor = thisD.getSensorCut().getSensor();

          storeMeasuredBearing(multiSensor, sensor, measuredBearing, flipAxes,
              thisMilli, bearingColor, parentIsNotDynamic, thisD,
              measuredValuesColl);

          if (hasAmbiguous)
          {
            // ambiguous cuts go into their own "(A)"-suffixed series
            final String ambSeriesName = multiSensor
                ? BaseStackedDotsView.MEASURED_VALUES + sensor.getName() + "(A)"
                : BaseStackedDotsView.MEASURED_VALUES + "(A)";
            final ColouredDataItem amBearing = storeAmbiguousCut(ambigBearing,
                flipAxes, bearingToPort, thisColor, thisD, grayShade, thisMilli,
                parentIsNotDynamic);
            safelyAddItem(ambigValuesColl, ambSeriesName, amBearing);
          }

          // do we have target data?
          if (thisD.getTarget() != null)
          {
            // and has this target fix know it's location?
            // (it may not, if it's a relative leg that has been extended)
            if (thisD.getTarget().getFixLocation() != null)
            {
              double calculatedBearing = thisD.getCalculatedBearing(null, null);

              // note: now that we're allowing multi-sensor TMA, we should color the
              // errors according to the sensor color (not the target color)
              final Color error = thisD.getColor();
              final Color calcColor = thisD.getTarget().getColor();
              final double thisTrueError = thisD.calculateBearingError(
                  measuredBearing, calculatedBearing);

              if (flipAxes)
              {
                if (calculatedBearing > 180)
                {
                  calculatedBearing -= 360;
                }
              }
              else
              {
                if (calculatedBearing < 0)
                {
                  calculatedBearing += 360;
                }
              }

              final Color brgColor;
              if (bearingToPort)
              {
                brgColor = error;
              }
              else
              {
                brgColor = error.darker();
              }

              final ColouredDataItem newTrueError = new ColouredDataItem(
                  thisMilli, thisTrueError, brgColor, false, null, true,
                  parentIsNotDynamic, thisD.getTarget());

              final Color halfBearing = halfWayColor(calcColor, brgColor);

              final ColouredDataItem cBearing = new ColouredDataItem(thisMilli,
                  calculatedBearing, halfBearing, true, null, true,
                  parentIsNotDynamic, thisD.getTarget());

              final String sensorName = thisD.getSensorCut().getSensorName();

              // ok, get this error
              final String errorName = multiSensor
                  ? BaseStackedDotsView.ERROR_VALUES + sensorName
                  : BaseStackedDotsView.ERROR_VALUES;
              safelyAddItem(dotPlotData, errorName, newTrueError);

              // get the calc series for this one
              final String calcName = multiSensor
                  ? StackedDotHelper.CALCULATED_VALUES + sensorName
                  : StackedDotHelper.CALCULATED_VALUES;
              safelyAddItem(calculatedSeries, calcName, cBearing);

              // and the ambiguous error, if it hasn't been resolved
              if (!thisD.getHasBeenResolved())
              {
                if (flipAxes)
                {
                  if (ambigBearing > 180)
                  {
                    ambigBearing -= 360;
                  }
                }

                final Color ambigColor;
                if (bearingToPort)
                {
                  ambigColor = error.darker();
                }
                else
                {
                  ambigColor = error;
                }

                final double thisAmnigError = thisD.calculateBearingError(
                    ambigBearing, calculatedBearing);
                final ColouredDataItem newAmbigError = new ColouredDataItem(
                    thisMilli, thisAmnigError, ambigColor, false, null, true,
                    parentIsNotDynamic);
                final String ambErrorName = multiSensor
                    ? BaseStackedDotsView.ERROR_VALUES + "_amb_" + sensorName
                    : BaseStackedDotsView.ERROR_VALUES + "_amb_";
                safelyAddItem(dotPlotData, ambErrorName, newAmbigError);
              }
            }
          }
        }
        catch (final SeriesException e)
        {
          CorePlugin.logError(IStatus.INFO,
              "some kind of trip whilst updating bearing plot", e);
        }
      }

      // just double-check we've still got our primary doublets
      if (_primaryDoublets == null)
      {
        CorePlugin.logError(IStatus.WARNING,
            "FOR SOME REASON PRIMARY DOUBLETS IS NULL - INVESTIGATE", null);
        return;
      }

      if (_primaryDoublets.size() == 0)
      {
        CorePlugin.logError(IStatus.WARNING,
            "FOR SOME REASON PRIMARY DOUBLETS IS ZERO LENGTH - INVESTIGATE",
            null);
        return;
      }

      // right, we do course in a special way, since it isn't dependent on the
      // target track. Do course here.
      final HiResDate startDTG = _primaryDoublets.first().getDTG();
      final HiResDate endDTG = _primaryDoublets.last().getDTG();

      if (startDTG.greaterThan(endDTG))
      {
        System.err.println("in the wrong order, start:" + startDTG + " end:"
            + endDTG);
        return;
      }

      // special case - if the primary track is a single location
      for (final TrackWrapper thisPrimary : getPrimaryTracks())
      {
        final TimeSeries osCourseValues;
        if (_primaryTrack.isSinglePointTrack())
        {
          // ok, it's a single point. We'll use the sensor cut times for the course data
          osCourseValues = getSinglePointCourseData(thisPrimary,
              _secondaryTrack, flipAxes);
        }
        else
        {
          osCourseValues = getStandardCourseData(thisPrimary, flipAxes,
              startDTG, endDTG);
        }
        ownshipCourseColl.addSeries(osCourseValues);
      }

      if (_secondaryTrack != null)
      {
        storeTargetCourseSpeedData(_secondaryTrack, startDTG, endDTG, flipAxes,
            tgtCourseValues, tgtSpeedValues);
      }

      // sort out the sensor cuts (all of them, not just those when we have target legs)
      final TimePeriod sensorPeriod;
      if (_secondaryTrack != null)
      {
        sensorPeriod = new TimePeriod.BaseTimePeriod(_secondaryTrack
            .getStartDTG(), _secondaryTrack.getEndDTG());
      }
      else
      {
        sensorPeriod = null;
      }
      final TimeSeriesCollection allCutsColl = getAllSensorCuts(onlyVis,
          flipAxes, sensorPeriod);

      // transfer the collated series into the line plot
      final Iterator<?> mIter = measuredValuesColl.getSeries().iterator();
      while (mIter.hasNext())
      {
        final TimeSeries series = (TimeSeries) mIter.next();
        linePlotData.addSeries(series);
      }

      final Iterator<?> aIter = ambigValuesColl.getSeries().iterator();
      while (aIter.hasNext())
      {
        final TimeSeries series = (TimeSeries) aIter.next();
        linePlotData.addSeries(series);
      }

      final Iterator<?> cIter = calculatedSeries.getSeries().iterator();
      while (cIter.hasNext())
      {
        final TimeSeries series = (TimeSeries) cIter.next();
        linePlotData.addSeries(series);
      }

      if (tgtCourseValues.getItemCount() > 0)
      {
        targetCourseSeries.addSeries(tgtCourseValues);

        // ok, sort out the renderer
        if (overviewCourseRenderer != null)
        {
          overviewCourseRenderer.setLightweightMode(tgtCourseValues
              .getItemCount() > MAX_ITEMS_TO_PLOT);
        }
      }

      if (tgtSpeedValues.getItemCount() > 0)
      {
        targetSpeedSeries.addSeries(tgtSpeedValues);
        if (overviewSpeedRenderer != null)
        {
          overviewSpeedRenderer.setLightweightMode(tgtSpeedValues
              .getItemCount() > MAX_ITEMS_TO_PLOT);
        }
      }

      if (showCourse)
      {
        final Iterator<?> oIter = ownshipCourseColl.getSeries().iterator();
        while (oIter.hasNext())
        {
          final TimeSeries thisOwnshipSeries = (TimeSeries) oIter.next();
          targetCourseSeries.addSeries(thisOwnshipSeries);

          // and the course data for the zone chart
          if (!thisOwnshipSeries.isEmpty() && ownshipCourseSeries != null
              && ownshipCourseSeries.isEmpty())
          {
            // note - only populate it, if it's currently empty
            ownshipCourseSeries.addAndOrUpdate(thisOwnshipSeries);
          }
        }
      }

      final Iterator<?> cIter2 = calculatedSeries.getSeries().iterator();
      while (cIter2.hasNext())
      {
        final TimeSeries series = (TimeSeries) cIter2.next();
        targetCalculatedSeries.addAndOrUpdate(series);
      }

      // and the bearing data for the zone chart
      final Iterator<?> cutsIter = allCutsColl.getSeries().iterator();
      while (cutsIter.hasNext())
      {
        final TimeSeries thisS = (TimeSeries) cutsIter.next();
        if (targetBearingSeries != null && targetBearingSeries.isEmpty())
        {
          // note - only populate it, if it's currently empty
          targetBearingSeries.addAndOrUpdate(thisS);
        }
        else
        {
          // ok, ignore it. we only assign the data in the first pass
        }
      }

      // find the color for maximum value in the error series, if we have error data
      if (dotPlotData.getSeriesCount() > 0)
      {
        // retrieve the cut-off value
        final double cutOffValue;
        final String prefValue = Application.getThisProperty(
            RelativeTMASegment.CUT_OFF_VALUE_DEGS);
        if (prefValue != null && prefValue.length() > 0 && Double.valueOf(
            prefValue) != null)
        {
          cutOffValue = Double.valueOf(prefValue);
        }
        else
        {
          // default cut-off of 3 degrees when no preference is set
          cutOffValue = 3d;
        }
        final Paint errorColor = calculateErrorShadeFor(dotPlotData,
            cutOffValue);
        // dotPlot.setBackgroundPaint(errorColor);
        backShader.setShade(errorColor);
      }
    }
    finally
    {
      // now switch on updates again
      for (final Series series : sList)
      {
        series.setNotify(true);
      }
      // now switch on updates again
      for (final TimeSeriesCollection series : tList)
      {
        series.setNotify(true);
      }
    }
  }

  /**
   * go through the tracks, finding the relevant position on the other track.
   */
  private void updateDoublets(final boolean onlyVis, final boolean needBearing,
      final boolean needFreq)
  {
    // ok - we're now there
    // so, do we have primary and secondary tracks?
    if (_primaryTrack != null)
    {
      // cool sort out the list of sensor locations for these tracks
      _primaryDoublets = getDoublets(_primaryTracks, _secondaryTrack, onlyVis,
          needBearing, needFreq);
    }
  }

  /**
   * ok, our track has been dragged, calculate the new series of offsets. Rebuilds the frequency
   * residuals (dot plot) and the measured/predicted/base frequency series (line plot).
   *
   * @param linePlot
   * @param dotPlot
   * @param onlyVis
   * @param holder
   * @param logger
   * @param fZeroMarker
   *
   * @param currentOffset
   *          how far the current track has been dragged
   */
  public void updateFrequencyData(final TimeSeriesCollection dotPlotData,
      final TimeSeriesCollection linePlotData,
      final SwitchableTrackProvider tracks, final boolean onlyVis,
      final ErrorLogger logger, final boolean updateDoublets,
      final SetBackgroundShade backShader,
      final ColourStandardXYItemRenderer lineRend)
  {
    // do we have anything?
    if (_primaryTrack == null)
    {
      return;
    }

    // ok, find the track wrappers
    if (_secondaryTrack == null)
    {
      initialise(tracks, false, onlyVis, logger, "Frequency", false, true);
    }

    // ok - the tracks have moved. better update the doublets
    if (updateDoublets)
    {
      updateDoublets(onlyVis, false, true);
    }

    // aah - but what if we've ditched our doublets?
    if ((_primaryDoublets == null) || (_primaryDoublets.size() == 0))
    {
      // better clear the plot
      dotPlotData.removeAllSeries();
      linePlotData.removeAllSeries();
      return;
    }

    // create the collection of series
    // final TimeSeriesCollection errorSeries = new TimeSeriesCollection();
    // final TimeSeriesCollection actualSeries = new TimeSeriesCollection();
    final TimeSeriesCollection baseValuesSeries = new TimeSeriesCollection();

    if (_primaryTrack == null)
    {
      return;
    }

    final TimeSeriesCollection measuredValuesColl = new TimeSeriesCollection();
    // final TimeSeries correctedValues = new TimeSeries("Corrected");
    final TimeSeriesCollection predictedValuesColl = new TimeSeriesCollection();

    // ok, run through the points on the primary track
    final Iterator<Doublet> iter = _primaryDoublets.iterator();
    SensorWrapper lastSensor = null;

    // sort out the speed of sound
    final String speedStr = CorePlugin.getDefault().getPreferenceStore()
        .getString(FrequencyCalcs.SPEED_OF_SOUND_KTS_PROPERTY);
    final double speedOfSound;
    if (speedStr != null && speedStr.length() > 0)
    {
      speedOfSound = Double.parseDouble(speedStr);
    }
    else
    {
      speedOfSound = FrequencyCalcs.SpeedOfSoundKts;
    }

    while (iter.hasNext())
    {
      final Doublet thisD = iter.next();
      try
      {
        final Color thisColor = thisD.getColor();
        final double measuredFreq = thisD.getMeasuredFrequency();
        final HiResDate currentTime = thisD.getDTG();
        final FixedMillisecond thisMilli = new FixedMillisecond(currentTime
            .getDate().getTime());

        final ColouredDataItem mFreq = new ColouredDataItem(thisMilli,
            measuredFreq, thisColor, false, null, true, true, thisD
                .getSensorCut());

        // final ColouredDataItem corrFreq = new ColouredDataItem(
        // new FixedMillisecond(currentTime.getDate().getTime()),
        // correctedFreq, thisColor, false, null);
        final SensorWrapper thisSensor = thisD.getSensorCut().getSensor();
        final String sensorName = thisSensor.getName();
        safelyAddItem(measuredValuesColl, sensorName, mFreq);

        final double baseFreq = thisD.getBaseFrequency();
        if (!Double.isNaN(baseFreq))
        {
          // have we changed sensor?
          final boolean newSensor;
          if (thisSensor != null && !thisSensor.equals(lastSensor))
          {
            newSensor = true;
            lastSensor = thisSensor;
          }
          else
          {
            newSensor = false;
          }

          final ColouredDataItem bFreq = new ColouredDataItem(thisMilli,
              baseFreq, thisColor.darker(), !newSensor, null, true, true);
          safelyAddItem(baseValuesSeries, sensorName + "(base)", bFreq);

          // do we have target data?
          if (thisD.getTarget() != null)
          {
            final Color calcColor = thisD.getTarget().getColor();

            // did we get a base frequency? We may have a track
            // with a section of data that doesn't have frequency, you see.
            final double predictedFreq = thisD.getPredictedFrequency(
                speedOfSound);
            final double thisError = thisD.calculateFreqError(measuredFreq,
                predictedFreq);
            final Color predictedColor = halfWayColor(calcColor, thisColor);
            final ColouredDataItem pFreq = new ColouredDataItem(thisMilli,
                predictedFreq, predictedColor, true, null, true, true, thisD
                    .getTarget());
            final ColouredDataItem eFreq = new ColouredDataItem(thisMilli,
                thisError, thisColor, false, null, true, true);
            safelyAddItem(predictedValuesColl, sensorName, pFreq);
            safelyAddItem(dotPlotData, sensorName, eFreq);
          } // if we have a target
        } // if we have a base frequency
      }
      catch (final SeriesException e)
      {
        CorePlugin.logError(IStatus.INFO,
            "some kind of trip whilst updating frequency plot", e);
      }
    }

    // find the color for maximum value in the error series, if we have error data
    if (dotPlotData.getSeriesCount() > 0)
    {
      final double cutOffValue;
      // retrieve the cut-off value
      final String prefValue = Application.getThisProperty(
          RelativeTMASegment.CUT_OFF_VALUE_HZ);
      if (prefValue != null && prefValue.length() > 0 && Double.valueOf(
          prefValue) != null)
      {
        // NOTE(review): preference is presumably stored in centi-Hz, hence /100 - confirm
        cutOffValue = Double.valueOf(prefValue) / 100d;
      }
      else
      {
        cutOffValue = 1d;
      }
      final Paint errorColor = calculateErrorShadeFor(dotPlotData,
          cutOffValue);
      backShader.setShade(errorColor);
    }

    final Iterator<?> mIter = measuredValuesColl.getSeries().iterator();
    while (mIter.hasNext())
    {
      final TimeSeries series = (TimeSeries) mIter.next();
      linePlotData.addSeries(series);
    }

    // actualSeries.addSeries(correctedValues);

    final Iterator<?> pIter = predictedValuesColl.getSeries().iterator();
    while (pIter.hasNext())
    {
      final TimeSeries predictedValues = (TimeSeries) pIter.next();
      linePlotData.addSeries(predictedValues);
    }

    if (baseValuesSeries.getSeries().size() > 0)
    {
      final Iterator<?> bIter = baseValuesSeries.getSeries().iterator();
      while (bIter.hasNext())
      {
        final TimeSeries baseValues = (TimeSeries) bIter.next();
        linePlotData.addSeries(baseValues);
      }

      // sort out the rendering for the BaseFrequencies.
      // we want to show a solid line, with no markers
      // NOTE(review): series index 2 assumes base frequencies always land at this
      // position in the dataset - looks fragile, verify against series add order
      final int BaseFreqSeries = 2;
      lineRend.setSeriesShape(BaseFreqSeries, ShapeUtilities.createDiamond(
          0.2f));
      lineRend.setSeriesStroke(BaseFreqSeries, new BasicStroke(4));
      lineRend.setSeriesShapesVisible(BaseFreqSeries, false);
      lineRend.setSeriesShapesFilled(BaseFreqSeries, false);
    }
  }
}
package org.mwc.debrief.track_shift.views; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Paint; import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import java.util.Vector; import org.eclipse.core.runtime.IStatus; import org.eclipse.swt.widgets.Composite; import org.jfree.chart.plot.XYPlot; import org.jfree.data.general.Series; import org.jfree.data.general.SeriesException; import org.jfree.data.time.FixedMillisecond; import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesCollection; import org.jfree.data.time.TimeSeriesDataItem; import org.jfree.util.ShapeUtilities; import org.mwc.cmap.core.CorePlugin; import org.mwc.debrief.track_shift.controls.ZoneChart; import org.mwc.debrief.track_shift.controls.ZoneChart.ColorProvider; import org.mwc.debrief.track_shift.controls.ZoneChart.Zone; import org.mwc.debrief.track_shift.controls.ZoneChart.ZoneSlicer; import org.mwc.debrief.track_shift.zig_detector.ArtificalLegDetector; import org.mwc.debrief.track_shift.zig_detector.IOwnshipLegDetector; import org.mwc.debrief.track_shift.zig_detector.Precision; import org.mwc.debrief.track_shift.zig_detector.ownship.LegOfData; import org.mwc.debrief.track_shift.zig_detector.ownship.PeakTrackingOwnshipLegDetector; import Debrief.GUI.Frames.Application; import Debrief.Wrappers.FixWrapper; import Debrief.Wrappers.ISecondaryTrack; import Debrief.Wrappers.SensorContactWrapper; import Debrief.Wrappers.SensorWrapper; import Debrief.Wrappers.TrackWrapper; import Debrief.Wrappers.Track.Doublet; import Debrief.Wrappers.Track.DynamicInfillSegment; import Debrief.Wrappers.Track.RelativeTMASegment; import Debrief.Wrappers.Track.TrackSegment; import Debrief.Wrappers.Track.TrackWrapper_Support.SegmentList; import MWC.Algorithms.FrequencyCalcs; import MWC.GUI.Editable; import MWC.GUI.ErrorLogger; import MWC.GUI.Layers; 
import MWC.GUI.JFreeChart.ColourStandardXYItemRenderer;
import MWC.GUI.JFreeChart.ColouredDataItem;
import MWC.GenericData.HiResDate;
import MWC.GenericData.TimePeriod;
import MWC.GenericData.Watchable;
import MWC.GenericData.WatchableList;
import MWC.GenericData.WorldDistance;
import MWC.GenericData.WorldLocation;
import MWC.GenericData.WorldSpeed;
import MWC.GenericData.WorldVector;
import MWC.TacticalData.Fix;
import MWC.TacticalData.TrackDataProvider;

/**
 * Helper that maintains the "stacked dots" residual plots: it pairs sensor cuts on the primary
 * (ownship) track with fixes on the secondary (target) track, and populates the JFreeChart
 * datasets for the bearing/frequency residual views.
 */
public final class StackedDotHelper
{
  /** name used for the series of measured sensor values */
  public static final String MEASURED_DATASET = "Measured";

  /**
   * simple value-holder pairing a target fix with the track segment it belongs to; either field
   * may be left null when no suitable match is found
   */
  private static class TargetDoublet
  {
    public TrackSegment targetParent;
    public FixWrapper targetFix;
  }

  /** JUnit tests for the slicing/leg-assignment helpers */
  public static class TestSlicing extends junit.framework.TestCase
  {
    public void testOSLegDetector()
    {
      final TimeSeries osC = new TimeSeries(new FixedMillisecond());
      long time = 0;
      // varied values - no long run of duplicates
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 21d);
      osC.add(new FixedMillisecond(time++), 22d);
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 21d);
      osC.add(new FixedMillisecond(time++), 20d);
      assertFalse(containsIdenticalValues(osC, 3));

      // inject some more duplicates
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 20d);
      assertTrue(containsIdenticalValues(osC, 3));

      // now check that broken runs of duplicates don't count as one long run
      osC.clear();
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 21d);
      osC.add(new FixedMillisecond(time++), 21d);
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 21d);
      osC.add(new FixedMillisecond(time++), 21d);
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 20d);
      assertFalse("check we're verifying single runs of matches",
          containsIdenticalValues(osC, 3));
      osC.add(new FixedMillisecond(time++), 20d);
      osC.add(new FixedMillisecond(time++), 20d);
      assertTrue(containsIdenticalValues(osC, 3));
    }

    public void testSetLeg()
    {
      final TrackWrapper host = new TrackWrapper();
      host.setName("Host Track");

      // create a sensor
      final SensorWrapper sensor = new SensorWrapper("Sensor");
      sensor.setHost(host);
      host.add(sensor);

      // add some cuts (one every 10 seconds, walking north-east)
      final ArrayList<SensorContactWrapper> contacts =
          new ArrayList<SensorContactWrapper>();
      for (int i = 0; i < 30; i++)
      {
        final HiResDate thisDTG = new HiResDate(10000 * i);
        final WorldLocation thisLocation =
            new WorldLocation(2 + 0.01 * i, 2 + 0.03 * i, 0);
        final SensorContactWrapper scw =
            new SensorContactWrapper(host.getName(), thisDTG, new WorldDistance(4,
                WorldDistance.MINUTES), 25d, thisLocation, Color.RED, "" + i, 0,
                sensor.getName());
        sensor.add(scw);
        contacts.add(scw);

        // also create a host track fix at this DTG
        final Fix theFix = new Fix(thisDTG, thisLocation, 12d, 3d);
        final FixWrapper newF = new FixWrapper(theFix);
        host.add(newF);
      }

      // produce the target leg
      final TrackWrapper target = new TrackWrapper();
      target.setName("Tgt Track");

      // add a TMA leg
      final Layers theLayers = new Layers();
      theLayers.addThisLayer(host);
      theLayers.addThisLayer(target);
      final SensorContactWrapper[] contactArr =
          contacts.toArray(new SensorContactWrapper[] {});
      final RelativeTMASegment newLeg =
          new RelativeTMASegment(contactArr, new WorldVector(1, 1, 0), new WorldSpeed(
              12, WorldSpeed.Kts), 12d, theLayers, Color.red);
      target.add(newLeg);

      // anonymous view - stub out the abstract UI hooks, we only need setLeg()
      final BaseStackedDotsView view = new BaseStackedDotsView(true, false)
      {
        @Override
        protected boolean allowDisplayOfTargetOverview()
        {
          return false;
        }

        @Override
        protected boolean allowDisplayOfZoneChart()
        {
          return false;
        }

        @Override
        protected String formatValue(final double value)
        {
          return "" + value;
        }

        @Override
        protected ZoneSlicer getOwnshipZoneSlicer(final ColorProvider blueProv)
        {
          return null;
        }

        @Override
        protected String getType()
        {
          return null;
        }

        @Override
        protected String getUnits()
        {
          return null;
        }

        @Override
        protected void updateData(final boolean updateDoublets)
{ // no, nothing to do. } }; // try to set a zone on the track Zone trimmedPeriod = new Zone(150000, 220000, Color.RED); view.setLeg(host, target, trimmedPeriod); // ok, check the leg has changed assertEquals("leg start changed", 150000, target.getStartDTG().getDate() .getTime()); assertEquals("leg start changed", 220000, target.getEndDTG().getDate() .getTime()); // ok, also see if we can create a new leg trimmedPeriod = new Zone(250000, 320000, Color.RED); view.setLeg(host, target, trimmedPeriod); } } public static final String CALCULATED_VALUES = "Calculated"; /** * produce a color shade, according to whether the max error is inside 3 degrees or not. * * @param errorSeries * @return */ private static Paint calculateErrorShadeFor( final TimeSeriesCollection errorSeries, final double cutOffValue) { final Paint col; double maxError = 0d; final TimeSeries ts = errorSeries.getSeries(0); final List<?> items = ts.getItems(); for (final Iterator<?> iterator = items.iterator(); iterator.hasNext();) { final TimeSeriesDataItem item = (TimeSeriesDataItem) iterator.next(); final boolean useMe; // check this isn't infill if (item instanceof ColouredDataItem) { final ColouredDataItem cd = (ColouredDataItem) item; useMe = cd.isShapeFilled(); } else { useMe = true; } if (useMe) { final double thisE = (Double) item.getValue(); maxError = Math.max(maxError, Math.abs(thisE)); } } if (maxError > cutOffValue) { col = new Color(1f, 0f, 0f, 0.05f); } else { final float shade = (float) (0.03f + (cutOffValue - maxError) * 0.02f); col = new Color(0f, 1f, 0f, shade); } return col; } /** * determine if this time series contains many identical values - this is an indicator for data * coming from a simulator, for which turns can't be determined by our peak tracking algorithm. 
   * @param dataset
   *          the series of (course) values to inspect
   * @param NumMatches
   *          length of run of identical values that triggers a positive result, or null to use
   *          10% of the series length
   * @return true if a sufficiently long unbroken run of identical values is present
   */
  private static boolean containsIdenticalValues(final TimeSeries dataset,
      final Integer NumMatches)
  {
    final int num = dataset.getItemCount();

    // how many consecutive matches do we need?
    final int numMatches;
    if (NumMatches != null)
    {
      numMatches = NumMatches;
    }
    else
    {
      // no threshold supplied - use a proportion of the series length
      final double MATCH_PROPORTION = 0.1;
      numMatches = (int) (num * MATCH_PROPORTION);
    }

    double lastCourse = 0d;
    int matchCount = 0;

    for (int ctr = 0; ctr < num; ctr++)
    {
      final TimeSeriesDataItem thisItem = dataset.getDataItem(ctr);
      final double thisCourse = (Double) thisItem.getValue();
      if (thisCourse == lastCourse)
      {
        // ok, count the duplicates
        matchCount++;
        if (matchCount >= numMatches)
        {
          return true;
        }
      }
      else
      {
        // run broken - start counting again
        matchCount = 0;
      }
      lastCourse = thisCourse;
    }

    return false;
  }

  /**
   * collect the sorted set of measured/calculated pairings (doublets) for the supplied tracks,
   * pairing each suitable sensor cut on the host with the nearest host fix and (when available)
   * a target fix.
   *
   * @param sensorHost
   *          the ownship track carrying the sensor data
   * @param targetTrack
   *          the target (TMA) track, may be null
   * @param onlyVis
   *          only consider visible sensors/cuts
   * @param needBearing
   *          skip cuts without bearing data
   * @param needFrequency
   *          skip cuts without frequency data
   * @return sorted set of doublets
   */
  public static TreeSet<Doublet> getDoublets(final TrackWrapper sensorHost,
      final ISecondaryTrack targetTrack, final boolean onlyVis,
      final boolean needBearing, final boolean needFrequency)
  {
    final TreeSet<Doublet> res = new TreeSet<Doublet>();

    // friendly fix-wrapper to save us repeatedly creating it
    final FixWrapper index =
        new FixWrapper(new Fix(null, new WorldLocation(0, 0, 0), 0.0, 0.0));

    // collect the visible target segments, if we have a target track
    final Vector<TrackSegment> theSegments;
    if (targetTrack != null)
    {
      theSegments = getTargetLegs(targetTrack);
    }
    else
    {
      theSegments = null;
    }

    // loop through our sensor data
    final Enumeration<Editable> sensors = sensorHost.getSensors().elements();
    if (sensors != null)
    {
      while (sensors.hasMoreElements())
      {
        final SensorWrapper wrapper = (SensorWrapper) sensors.nextElement();
        if (!onlyVis || (onlyVis && wrapper.getVisible()))
        {
          final Enumeration<Editable> cuts = wrapper.elements();
          while (cuts.hasMoreElements())
          {
            final SensorContactWrapper scw =
                (SensorContactWrapper) cuts.nextElement();
            if (!onlyVis || (onlyVis && scw.getVisible()))
            {
              // is this cut suitable for what we're looking for?
              if (needBearing)
              {
                if (!scw.getHasBearing())
                {
                  continue;
                }
              }

              // aaah, but does it meet the frequency requirement?
              if (needFrequency)
              {
                if (!scw.getHasFrequency())
                {
                  continue;
                }
              }

              /**
               * note: if this is frequency data then we accept an interpolated fix. This is based
               * upon the working practice that initially legs of target track are created from
               * bearing data.
               *
               * Plus, there is greater variance in bearing angle - so it's more important to get
               * the right data item.
               */
              final boolean interpFix = needFrequency;

              /**
               * for frequency data we don't generate a double for dynamic infills, since we have
               * low confidence in the target course/speed
               */
              final boolean allowInfill = !needFrequency;

              // find the matching target fix/segment (either field may end up null)
              final TargetDoublet doublet =
                  getTargetDoublet(index, theSegments, scw.getDTG(), interpFix,
                      allowInfill);

              final Doublet thisDub;

              // find the host fix nearest to this cut
              final FixWrapper hostFix;
              final Watchable[] matches = sensorHost.getNearestTo(scw.getDTG());
              if (matches != null && matches.length == 1)
              {
                hostFix = (FixWrapper) matches[0];
              }
              else
              {
                hostFix = null;
              }

              if (doublet.targetFix != null && hostFix != null)
              {
                thisDub =
                    new Doublet(scw, doublet.targetFix, doublet.targetParent,
                        hostFix);

                // if we've no target track add all the points
                if (targetTrack == null)
                {
                  // store our data
                  res.add(thisDub);
                }
                else
                {
                  // if we've got a target track we only add points for which we
                  // have a target location
                  if (doublet.targetFix != null)
                  {
                    // store our data
                    res.add(thisDub);
                  }
                } // if we know the track
                // if there are any matching items
              } // if we find a match
              // this test used to be the following, but we changed it so we
              // could see measured data even when we don't have track:
              // else if ((targetTrack == null && hostFix != null) || (doublet.targetFix == null &&
              // hostFix != null))
              else if (hostFix != null
                  && (doublet.targetFix == null || targetTrack == null))
              {
                // no target data, just use ownship sensor data
                thisDub = new Doublet(scw, null, null, hostFix);
                res.add(thisDub);
              }
            } // if cut is visible
          } // loop through cuts
        } // if sensor is visible
      } // loop through sensors
    } // if there are sensors

    return res;
  }

  /**
   * find the target fix (and its parent segment) to pair with a sensor cut at the supplied time.
   *
   * @param workingFix
   *          pre-existing fix object, to stop us repeatedly creating it
   * @param theSegments
   *          the segment within this track
   * @param requiredTime
   *          the time we need data for
   * @param interpFix
   *          whether to only accept a target fix within 1 second of the target time, or to
   *          interpolate the nearest one
   * @param allowInfill
   *          whether we generate a doublet for dynamic infill segments
   * @return a Doublet containing the relevant data
   */
  private static TargetDoublet getTargetDoublet(final FixWrapper workingFix,
      final Vector<TrackSegment> theSegments, final HiResDate requiredTime,
      final boolean interpFix, final boolean allowInfill)
  {
    final TargetDoublet doublet = new TargetDoublet();
    if (theSegments != null && !theSegments.isEmpty())
    {
      final Iterator<TrackSegment> iter = theSegments.iterator();
      while (iter.hasNext())
      {
        final TrackSegment ts = iter.next();

        if (ts.endDTG() == null || ts.startDTG() == null)
        {
          // ok, move onto the next segment
          CorePlugin.logError(IStatus.WARNING, "Warning, segment is missing data:"
              + ts, null);
          continue;
        }

        // does this segment cover the required time?
        final TimePeriod validPeriod =
            new TimePeriod.BaseTimePeriod(ts.startDTG(), ts.endDTG());
        if (validPeriod.contains(requiredTime))
        {
          // if this is an infill, then we're relaxed about the errors
          if (ts instanceof DynamicInfillSegment)
          {
            // aaah, but are we interested in infill segments?
            if (allowInfill)
            {
              handleDynamicInfill(workingFix, requiredTime, doublet, ts);
            }
          }
          else
          {
            // see if we're allowing an interpolated fix
            if (interpFix)
            {
              generateInterpolatedDoublet(requiredTime, doublet, ts);
              break;
            }
            else
            {
              // ok, check we have a TMA fix almost exactly at this time
              final Enumeration<Editable> fixes = ts.elements();
              while (fixes.hasMoreElements())
              {
                final FixWrapper thisF = (FixWrapper) fixes.nextElement();
                // note: workaround. When we've merged the track,
                // the new legs are actually one millisecond later.
                // workaround this.
                final long timeDiffMicros =
                    Math.abs(thisF.getDTG().getMicros() - requiredTime.getMicros());
                // accept fixes within 1 millisecond (1000 micros) of the cut time
                if (timeDiffMicros <= 1000)
                {
                  // sorted. here we go
                  doublet.targetParent = ts;
                  doublet.targetFix = thisF;

                  // ok, done.
                  break;
                }
              }
            }
          }
        }
      }
    }
    return doublet;
  }

  /**
   * populate the doublet with a fix interpolated on the supplied segment at the required time.
   *
   * @param requiredTime
   *          the time to interpolate at
   * @param doublet
   *          the doublet to store the results in
   * @param segment
   *          the target segment to interpolate within
   */
  private static void generateInterpolatedDoublet(final HiResDate requiredTime,
      final TargetDoublet doublet, final TrackSegment segment)
  {
    // ok, we'll interpolate the nearest value
    FixWrapper before = null;
    FixWrapper after = null;
    final Enumeration<Editable> fixes = segment.elements();
    while (fixes.hasMoreElements() && after == null)
    {
      final FixWrapper thisF = (FixWrapper) fixes.nextElement();
      final HiResDate thisTime = thisF.getDTG();
      // NOTE(review): on the first pass 'before' is assigned regardless of whether
      // this fix precedes requiredTime. The caller only reaches here when the
      // segment period contains requiredTime, so the first fix should not be later
      // than it - but confirm if this is ever called outside that context.
      if (before == null || thisTime.lessThan(requiredTime))
      {
        before = thisF;
      }
      else if (thisTime.greaterThanOrEqualTo(requiredTime))
      {
        after = thisF;
      }
    }

    // ok, we've now boxed the required value
    final FixWrapper interp = FixWrapper.interpolateFix(before, after, requiredTime);
    doublet.targetFix = interp;
    doublet.targetParent = segment;
  }

  /**
   * populate the doublet from a dynamic infill segment: take the first fix at or after the
   * required time.
   *
   * @param workingFix
   *          scratch fix, reused to avoid repeated allocation
   * @param requiredTime
   *          the time of interest
   * @param doublet
   *          the doublet to store the results in
   * @param segment
   *          the infill segment being inspected
   */
  private static void handleDynamicInfill(final FixWrapper workingFix,
      final HiResDate requiredTime, final TargetDoublet doublet,
      final TrackSegment segment)
  {
    // sorted. here we go
    doublet.targetParent = segment;

    // create an object with the right time
    workingFix.getFix().setTime(requiredTime);

    // and find any matching items
    final SortedSet<Editable> items = segment.tailSet(workingFix);
    if (!items.isEmpty())
    {
      doublet.targetFix = (FixWrapper) items.first();
    }
  }

  /**
   * collect the visible segments of the supplied target track, unwrapping any SegmentList
   * containers.
   *
   * @param targetTrack
   *          the track to walk through
   * @return the visible segments
   */
  private static Vector<TrackSegment> getTargetLegs(
      final ISecondaryTrack targetTrack)
  {
    final Vector<TrackSegment> _theSegments = new Vector<TrackSegment>();
    final Enumeration<Editable> trkData = targetTrack.segments();
    while (trkData.hasMoreElements())
    {
      final Editable thisI = trkData.nextElement();
      if (thisI instanceof SegmentList)
      {
        // a list of segments - walk its children
        final SegmentList thisList = (SegmentList) thisI;
        final Enumeration<Editable> theElements = thisList.elements();
        while (theElements.hasMoreElements())
        {
          final TrackSegment ts = (TrackSegment) theElements.nextElement();
          if (ts.getVisible())
          {
            _theSegments.add(ts);
          }
        }
      }
      else if (thisI instanceof TrackSegment)
      {
        // a single, standalone segment
        final TrackSegment ts = (TrackSegment) thisI;
        _theSegments.add(ts);
      }
    }
    return _theSegments;
  }

  /**
   * slice the supplied ownship course data into straight legs, returning one chart Zone per leg.
   *
   * @param osCourse
   *          series of ownship course values (degrees)
   * @param colorProvider
   *          supplies the color for each new zone
   * @return the detected legs, as zones
   */
  public static ArrayList<Zone> sliceOwnship(final TimeSeries osCourse,
      final ZoneChart.ColorProvider colorProvider)
  {
    // make a decision on which ownship slicer to use: simulator-style data (long
    // runs of identical values) defeats the peak-tracking algorithm
    final IOwnshipLegDetector detector;
    if (containsIdenticalValues(osCourse, null))
    {
      detector = new ArtificalLegDetector();
    }
    else
    {
      detector = new PeakTrackingOwnshipLegDetector();
    }

    // unpack the series into the primitive arrays the detectors expect
    final int num = osCourse.getItemCount();
    final long[] times = new long[num];
    final double[] speeds = new double[num];
    final double[] courses = new double[num];
    for (int ctr = 0; ctr < num; ctr++)
    {
      final TimeSeriesDataItem thisItem = osCourse.getDataItem(ctr);
      final FixedMillisecond thisM = (FixedMillisecond) thisItem.getPeriod();
      times[ctr] = thisM.getMiddleMillisecond();
      speeds[ctr] = 0;
      courses[ctr] = (Double) thisItem.getValue();
    }
    final List<LegOfData> legs =
        detector.identifyOwnshipLegs(times, speeds, courses, 5, Precision.LOW);
    final ArrayList<Zone> res = new ArrayList<Zone>();

    for (final
    LegOfData leg : legs)
    {
      // convert this leg into a chart zone
      final Zone newZone =
          new Zone(leg.getStart(), leg.getEnd(), colorProvider.getZoneColor());
      res.add(newZone);
    }
    return res;
  }

  /**
   * the maximum number of items we plot as symbols. Above this we just use a line
   */
  private final int MAX_ITEMS_TO_PLOT = 1000;

  /**
   * the track being dragged
   */
  private TrackWrapper _primaryTrack;

  /**
   * the secondary track we're monitoring
   */
  private ISecondaryTrack _secondaryTrack;

  /**
   * the set of points to watch on the primary track. This is stored as a sorted set because if we
   * have multiple sensors they may be suppled in chronological order, or they may represent
   * overlapping time periods
   */
  private TreeSet<Doublet> _primaryDoublets;

  /**
   * collect the sensor cuts on the supplied track, optionally restricted to visible items and/or
   * a time period.
   *
   * @param primaryTrack
   *          the track carrying the sensors
   * @param onlyVis
   *          only include visible sensors/cuts
   * @param targetPeriod
   *          restrict cuts to this period, or null for all
   * @return the matching cuts
   */
  public List<SensorContactWrapper> getBearings(final TrackWrapper primaryTrack,
      final boolean onlyVis, final TimePeriod targetPeriod)
  {
    final List<SensorContactWrapper> res = new ArrayList<SensorContactWrapper>();

    // loop through our sensor data
    final Enumeration<Editable> sensors = primaryTrack.getSensors().elements();
    if (sensors != null)
    {
      while (sensors.hasMoreElements())
      {
        final SensorWrapper wrapper = (SensorWrapper) sensors.nextElement();
        if (!onlyVis || (onlyVis && wrapper.getVisible()))
        {
          final Enumeration<Editable> cuts = wrapper.elements();
          while (cuts.hasMoreElements())
          {
            final SensorContactWrapper scw =
                (SensorContactWrapper) cuts.nextElement();
            if (!onlyVis || (onlyVis && scw.getVisible()))
            {
              if (targetPeriod == null || targetPeriod.contains(scw.getDTG()))
              {
                res.add(scw);
              }
              // if we find a match
            } // if cut is visible
          } // loop through cuts
        } // if sensor is visible
      } // loop through sensors
    } // if there are sensors

    return res;
  }

  /** convenience overload, using the tracks captured by initialise() */
  public TreeSet<Doublet> getDoublets(final boolean onlyVis,
      final boolean needBearing, final boolean needFrequency)
  {
    return getDoublets(_primaryTrack, _secondaryTrack, onlyVis, needBearing,
        needFrequency);
  }

  public TrackWrapper getPrimaryTrack()
  {
    return _primaryTrack;
  }

  public ISecondaryTrack getSecondaryTrack()
  {
    return _secondaryTrack;
  }
  /**
   * initialise the data, check we've got sensor data & the correct number of visible tracks
   *
   * @param tracks
   *          provider of the current primary/secondary track assignments
   * @param showError
   *          whether errors are being displayed
   * @param onlyVis
   *          only consider visible items
   * @param holder
   *          the UI composite we live in - used to detect disposal
   * @param logger
   *          destination for status/error messages
   * @param dataType
   *          label for the data being plotted
   * @param needBrg
   *          doublets require bearing data
   * @param needFreq
   *          doublets require frequency data
   */
  void initialise(final TrackDataProvider tracks, final boolean showError,
      final boolean onlyVis, final Composite holder, final ErrorLogger logger,
      final String dataType, final boolean needBrg, final boolean needFreq)
  {
    // have we been created?
    if (holder == null)
    {
      return;
    }

    // are we visible?
    if (holder.isDisposed())
    {
      return;
    }

    _secondaryTrack = null;
    _primaryTrack = null;

    // do we have some data?
    if (tracks == null)
    {
      // output error message
      logger.logError(IStatus.INFO, "Please open a Debrief plot", null);
      return;
    }

    // check we have a primary track
    final WatchableList priTrk = tracks.getPrimaryTrack();
    if (priTrk == null)
    {
      logger.logError(IStatus.INFO,
          "A primary track must be placed on the Tote", null);
      return;
    }
    else
    {
      if (!(priTrk instanceof TrackWrapper))
      {
        logger.logError(IStatus.INFO,
            "The primary track must be a vehicle track", null);
        return;
      }
      else
      {
        _primaryTrack = (TrackWrapper) priTrk;
      }
    }

    // now the sec track
    final WatchableList[] secs = tracks.getSecondaryTracks();

    // any?
    if ((secs == null) || (secs.length == 0))
    {
      logger.logError(IStatus.INFO, "No secondary track assigned", null);
    }
    else
    {
      // too many?
      if (secs.length > 1)
      {
        logger.logError(IStatus.INFO,
            "Only 1 secondary track may be on the tote", null);
        return;
      }

      // correct sort?
      final WatchableList secTrk = secs[0];
      if (!(secTrk instanceof ISecondaryTrack))
      {
        logger.logError(IStatus.INFO,
            "The secondary track must be a vehicle track", null);
        return;
      }
      else
      {
        _secondaryTrack = (ISecondaryTrack) secTrk;
      }
    }

    // must have worked, hooray
    logger.logError(IStatus.OK, null, null);

    // ok, get the positions
    updateDoublets(onlyVis, needBrg, needFreq);
  }

  /**
   * clear our data, all is finished
   */
  public void reset()
  {
    if (_primaryDoublets != null)
    {
      _primaryDoublets.clear();
    }
    _primaryDoublets = null;
    _primaryTrack = null;
    _secondaryTrack = null;
  }

  /**
   * ok, our track has been dragged, calculate the new series of bearing offsets and repopulate
   * the supplied plots/series.
   *
   * @param dotPlot
   *          plot showing the bearing errors
   * @param linePlot
   *          plot showing measured vs calculated bearings
   * @param targetPlot
   *          plot showing target course/speed overview
   * @param tracks
   *          provider of the current track assignments
   * @param onlyVis
   *          only consider visible items
   * @param showCourse
   *          whether to include ownship course in the course dataset
   * @param flipAxes
   *          plot bearings in the +/-180 domain rather than 0..360
   * @param holder
   *          the UI composite we live in
   * @param logger
   *          destination for status/error messages
   * @param updateDoublets
   *          whether to recalculate the doublets first
   * @param targetCourseSeries
   *          dataset receiving target (and optionally ownship) course
   * @param targetSpeedSeries
   *          dataset receiving target speed
   * @param measuredValues
   *          series receiving measured bearings
   * @param ambigValues
   *          series receiving ambiguous bearings
   * @param ownshipCourseSeries
   *          series for the zone chart - only populated on the first pass
   * @param targetBearingSeries
   *          series for the zone chart - only populated on the first pass
   * @param targetCalculatedSeries
   *          series receiving the calculated bearings
   * @param overviewSpeedRenderer
   *          renderer to switch to lightweight mode for large datasets
   * @param overviewCourseRenderer
   *          renderer to switch to lightweight mode for large datasets
   */
  public void updateBearingData(final XYPlot dotPlot, final XYPlot linePlot,
      final XYPlot targetPlot, final TrackDataProvider tracks,
      final boolean onlyVis, final boolean showCourse, final boolean flipAxes,
      final Composite holder, final ErrorLogger logger,
      final boolean updateDoublets, final TimeSeriesCollection targetCourseSeries,
      final TimeSeriesCollection targetSpeedSeries,
      final TimeSeries measuredValues, final TimeSeries ambigValues,
      final TimeSeries ownshipCourseSeries, final TimeSeries targetBearingSeries,
      final TimeSeries targetCalculatedSeries,
      final ResidualXYItemRenderer overviewSpeedRenderer,
      final WrappingResidualRenderer overviewCourseRenderer)
  {
    // do we even have a primary track
    if (_primaryTrack == null)
    {
      // ok, clear the data
      linePlot.setDataset(null);
      dotPlot.setDataset(null);
      targetPlot.setDataset(null);
      targetSpeedSeries.removeAllSeries();
      return;
    }

    // ok, find the track wrappers
    if (_secondaryTrack == null)
    {
      initialise(tracks, false, onlyVis, holder,
          logger, "Bearing", true, false);
    }

    // did it work?
    // if (_secondaryTrack == null)
    // return;

    // ok - the tracks have moved. better update the doublets
    if (updateDoublets)
    {
      updateDoublets(onlyVis, true, false);
    }

    // aah - but what if we've ditched our doublets?
    if ((_primaryDoublets == null) || (_primaryDoublets.size() == 0))
    {
      // better clear the plot
      dotPlot.setDataset(null);
      linePlot.setDataset(null);
      targetPlot.setDataset(null);
      targetPlot.setDataset(1, null);
      return;
    }

    // create the collection of series
    final TimeSeriesCollection errorSeries = new TimeSeriesCollection();
    final TimeSeriesCollection actualSeries = new TimeSeriesCollection();

    // the previous steps occupy some time.
    // just check we haven't lost the primary track while they were running
    if (_primaryTrack == null)
    {
      return;
    }

    // produce a dataset for each track
    final TimeSeries errorValues = new TimeSeries(_primaryTrack.getName());
    final TimeSeries ambigErrorValues =
        new TimeSeries(_primaryTrack.getName() + "(A)");
    final TimeSeries calculatedValues = new TimeSeries(CALCULATED_VALUES);
    final TimeSeries osCourseValues = new TimeSeries("O/S Course");
    final TimeSeries tgtCourseValues = new TimeSeries("Tgt Course");
    final TimeSeries tgtSpeedValues = new TimeSeries("Tgt Speed");
    final TimeSeries allCuts = new TimeSeries("Sensor cuts");

    // create a list of series, so we can pause their updates
    final List<TimeSeries> sList = new Vector<TimeSeries>();
    sList.add(errorValues);
    sList.add(ambigErrorValues);
    sList.add(measuredValues);
    sList.add(ambigValues);
    sList.add(calculatedValues);
    sList.add(osCourseValues);
    sList.add(tgtCourseValues);
    sList.add(tgtSpeedValues);
    sList.add(allCuts);
    sList.add(targetCalculatedSeries);
    sList.add(targetBearingSeries);
    sList.add(ownshipCourseSeries);

    final List<TimeSeriesCollection> tList = new Vector<TimeSeriesCollection>();
    tList.add(targetCourseSeries);
    tList.add(targetSpeedSeries);
    tList.add(errorSeries);
    tList.add(actualSeries);

    // ok, wrap the switching on/off of notify in try/catch,
    // to be sure to switch notify back on at end
    try
    {
      // now switch off updates
      for (final TimeSeriesCollection series : tList)
      {
        series.setNotify(false);
      }
      for (final TimeSeries series : sList)
      {
        series.setNotify(false);
        // and clear the list
        series.clear();
      }

      // clear the existing target datasets
      targetCourseSeries.removeAllSeries();
      targetSpeedSeries.removeAllSeries();

      // create the color for resolved ambig data
      final Color grayShade = new Color(155, 155, 155, 50);

      // ok, run through the points on the primary track
      final Iterator<Doublet> iter = _primaryDoublets.iterator();
      while (iter.hasNext())
      {
        final Doublet thisD = iter.next();

        // infill-parented items are rendered hollow, so flag them
        final boolean parentIsNotDynamic =
            thisD.getTargetTrack() == null
                || !(thisD.getTargetTrack() instanceof DynamicInfillSegment);
        try
        {
          // obvious stuff first (stuff that doesn't need the tgt data)
          final Color thisColor = thisD.getColor();
          double measuredBearing = thisD.getMeasuredBearing();
          double ambigBearing = thisD.getAmbiguousMeasuredBearing();
          final HiResDate currentTime = thisD.getDTG();
          final FixedMillisecond thisMilli =
              new FixedMillisecond(currentTime.getDate().getTime());

          final boolean hasAmbiguous = !Double.isNaN(ambigBearing);

          // ok, we need to make the color darker if it's starboard
          final boolean bearingToPort = thisD.getSensorCut().isBearingToPort();

          // make the color darker, if it's to stbd
          final Color bearingColor;
          if (bearingToPort)
          {
            bearingColor = thisColor;
          }
          else
          {
            bearingColor = thisColor.darker();
          }

          // put the measured bearing back in the positive domain
          if (measuredBearing < 0)
          {
            measuredBearing += 360d;
          }

          // stop, stop, stop - do we wish to plot bearings in the +/- 180 domain?
          if (flipAxes)
          {
            if (measuredBearing > 180)
            {
              measuredBearing -= 360;
            }
          }

          final ColouredDataItem mBearing =
              new ColouredDataItem(thisMilli, measuredBearing, bearingColor,
                  false, null, true, parentIsNotDynamic, thisD.getSensorCut());

          // and add them to the series
          measuredValues.addOrUpdate(mBearing);

          if (hasAmbiguous)
          {
            // put the ambig bearing into the correct domain
            while (ambigBearing < 0)
            {
              ambigBearing += 360;
            }

            if (flipAxes && ambigBearing > 180)
            {
              ambigBearing -= 360;
            }

            // make the color darker, if we're on the stbd bearing
            final Color ambigColor;
            if (bearingToPort)
            {
              ambigColor = thisColor.darker();
            }
            else
            {
              ambigColor = thisColor;
            }

            // if this cut has been resolved, we don't show a symbol
            // for the ambiguous cut
            final boolean showSymbol = true;
            final Color color =
                thisD.getHasBeenResolved() ? grayShade : ambigColor;

            final ColouredDataItem amBearing =
                new ColouredDataItem(thisMilli, ambigBearing, color, false, null,
                    showSymbol, parentIsNotDynamic, thisD.getSensorCut());
            ambigValues.addOrUpdate(amBearing);
          }

          // do we have target data?
          if (thisD.getTarget() != null)
          {
            // and has this target fix know it's location?
            // (it may not, if it's a relative leg that has been extended)
            if (thisD.getTarget().getFixLocation() != null)
            {
              double calculatedBearing = thisD.getCalculatedBearing(null, null);
              final Color errorColor = thisD.getTarget().getColor();

              // note: error is calculated before any axis-flipping of the bearing
              final double thisTrueError =
                  thisD.calculateBearingError(measuredBearing, calculatedBearing);

              if (flipAxes)
              {
                if (calculatedBearing > 180)
                {
                  calculatedBearing -= 360;
                }
              }
              else
              {
                if (calculatedBearing < 0)
                {
                  calculatedBearing += 360;
                }
              }

              final Color brgColor;
              if (bearingToPort)
              {
                brgColor = errorColor;
              }
              else
              {
                brgColor = errorColor.darker();
              }

              final ColouredDataItem newTrueError =
                  new ColouredDataItem(thisMilli, thisTrueError, brgColor, false,
                      null, true, parentIsNotDynamic);
              final ColouredDataItem cBearing =
                  new ColouredDataItem(thisMilli, calculatedBearing, brgColor,
                      true, null, true, parentIsNotDynamic, thisD.getTarget());

              errorValues.addOrUpdate(newTrueError);
              calculatedValues.addOrUpdate(cBearing);

              // and the ambiguous error, if it hasn't been resolved
              if (!thisD.getHasBeenResolved())
              {
                if (flipAxes)
                {
                  if (ambigBearing > 180)
                  {
                    ambigBearing -= 360;
                  }
                }

                final Color ambigColor;
                if (bearingToPort)
                {
                  ambigColor = errorColor.darker();
                }
                else
                {
                  ambigColor = errorColor;
                }

                final double thisAmnigError =
                    thisD.calculateBearingError(ambigBearing, calculatedBearing);
                final ColouredDataItem newAmbigError =
                    new ColouredDataItem(thisMilli, thisAmnigError, ambigColor,
                        false, null, true, parentIsNotDynamic);
                ambigErrorValues.addOrUpdate(newAmbigError);
              }
            }
          }
        }
        catch (final SeriesException e)
        {
          CorePlugin.logError(IStatus.INFO,
              "some kind of trip whilst updating bearing plot", e);
        }
      }

      // just double-check we've still got our primary doublets
      if (_primaryDoublets == null)
      {
        CorePlugin.logError(IStatus.WARNING,
            "FOR SOME REASON PRIMARY DOUBLETS IS NULL - INVESTIGATE", null);
        return;
      }

      if (_primaryDoublets.size() == 0)
      {
        CorePlugin.logError(IStatus.WARNING,
            "FOR SOME REASON PRIMARY DOUBLETS IS ZERO LENGTH - INVESTIGATE", null);
        return;
      }

      // right, we do course in a special way, since it isn't dependent on the
      // target track. Do course here.
      final HiResDate startDTG = _primaryDoublets.first().getDTG();
      final HiResDate endDTG = _primaryDoublets.last().getDTG();

      if (startDTG.greaterThan(endDTG))
      {
        System.err.println("in the wrong order, start:" + startDTG + " end:"
            + endDTG);
        return;
      }

      // special case - if the primary track is a single location
      if (_primaryTrack.isSinglePointTrack())
      {
        // ok, it's a single point. We'll use the sensor cut times for the course data
        // get the single location
        final FixWrapper loc =
            (FixWrapper) _primaryTrack.getPositionIterator().nextElement();

        final Enumeration<Editable> segments = _secondaryTrack.segments();
        while (segments.hasMoreElements())
        {
          final Editable nextE = segments.nextElement();
          // if there's just one segment - then we need to wrap it
          final SegmentList segList;
          if (nextE instanceof SegmentList)
          {
            segList = (SegmentList) nextE;
          }
          else
          {
            segList = new SegmentList();
            segList.addSegment((TrackSegment) nextE);
          }

          final Enumeration<Editable> segIter = segList.elements();
          while (segIter.hasMoreElements())
          {
            final TrackSegment segment = (TrackSegment) segIter.nextElement();

            final Enumeration<Editable> enumer = segment.elements();
            while (enumer.hasMoreElements())
            {
              final FixWrapper thisTgtFix = (FixWrapper) enumer.nextElement();

              double ownshipCourse =
                  MWC.Algorithms.Conversions.Rads2Degs(loc.getCourse());

              // stop, stop, stop - do we wish to plot bearings in the +/- 180 domain?
              if (flipAxes && ownshipCourse > 180)
              {
                ownshipCourse -= 360;
              }

              final FixedMillisecond thisMilli =
                  new FixedMillisecond(thisTgtFix.getDateTimeGroup().getDate()
                      .getTime());
              final ColouredDataItem crseBearing =
                  new ColouredDataItem(thisMilli, ownshipCourse, loc.getColor(),
                      true, null, true, true);
              osCourseValues.addOrUpdate(crseBearing);
            }
          }
        }
      }
      else
      {
        // loop through using the iterator
        final Enumeration<Editable> pIter = _primaryTrack.getPositionIterator();
        final TimePeriod validPeriod =
            new TimePeriod.BaseTimePeriod(startDTG, endDTG);
        final List<Editable> validItems = new LinkedList<Editable>();
        while (pIter.hasMoreElements())
        {
          final FixWrapper fw = (FixWrapper) pIter.nextElement();
          if (validPeriod.contains(fw.getDateTimeGroup()))
          {
            validItems.add(fw);
          }
          else
          {
            // have we passed the end of the requested period?
            if (fw.getDateTimeGroup().greaterThan(endDTG))
            {
              // ok, drop out
              break;
            }
          }
        }

        // ok, now go through the list, decimating so we don't plot more than
        // MAX_ITEMS_TO_PLOT points
        final Iterator<Editable> vIter = validItems.iterator();
        final int freq = Math.max(1, validItems.size() / MAX_ITEMS_TO_PLOT);
        int ctr = 0;
        while (vIter.hasNext())
        {
          final Editable ed = vIter.next();
          if (ctr++ % freq == 0)
          {
            final FixWrapper fw = (FixWrapper) ed;
            final FixedMillisecond thisMilli =
                new FixedMillisecond(fw.getDateTimeGroup().getDate().getTime());
            double ownshipCourse =
                MWC.Algorithms.Conversions.Rads2Degs(fw.getCourse());

            // stop, stop, stop - do we wish to plot bearings in the +/- 180 domain?
            if (flipAxes && ownshipCourse > 180)
            {
              ownshipCourse -= 360;
            }

            final ColouredDataItem crseBearing =
                new ColouredDataItem(thisMilli, ownshipCourse, fw.getColor(),
                    true, null, true, true);
            osCourseValues.addOrUpdate(crseBearing);
          }
        }
      }

      if (_secondaryTrack != null)
      {
        // sort out the target course/speed
        final Enumeration<Editable> segments = _secondaryTrack.segments();
        final TimePeriod period = new TimePeriod.BaseTimePeriod(startDTG, endDTG);
        while (segments.hasMoreElements())
        {
          final Editable nextE = segments.nextElement();
          // if there's just one segment - then we need to wrap it
          final SegmentList segList;
          if (nextE instanceof SegmentList)
          {
            segList = (SegmentList) nextE;
          }
          else
          {
            segList = new SegmentList();

            // note: we can only set the wrapper
            // if we're looking at a real TMA solution
            if (_secondaryTrack instanceof TrackWrapper)
            {
              segList.setWrapper((TrackWrapper) _secondaryTrack);
            }

            // ok, add this segment to the list
            segList.addSegment((TrackSegment) nextE);
          }

          final Enumeration<Editable> segIter = segList.elements();
          while (segIter.hasMoreElements())
          {
            final TrackSegment segment = (TrackSegment) segIter.nextElement();

            // is this an infill segment
            final boolean isInfill = segment instanceof DynamicInfillSegment;

            // check it has values, and is in range
            if (segment.isEmpty() || segment.startDTG().greaterThan(endDTG)
                || segment.endDTG().lessThan(startDTG))
            {
              // ok, we can skip this one
            }
            else
            {
              final Enumeration<Editable> points = segment.elements();
              Double lastCourse = null;
              while (points.hasMoreElements())
              {
                final FixWrapper fw = (FixWrapper) points.nextElement();
                if (period.contains(fw.getDateTimeGroup()))
                {
                  // ok, create a point for it
                  final FixedMillisecond thisMilli =
                      new FixedMillisecond(fw.getDateTimeGroup().getDate()
                          .getTime());

                  double tgtCourse =
                      MWC.Algorithms.Conversions.Rads2Degs(fw.getCourse());

                  final double tgtSpeed = fw.getSpeed();

                  // see if we need to change the domain of the course to match
                  // the previous value
                  if (lastCourse != null)
                  {
                    if (tgtCourse - lastCourse > 190)
                    {
                      tgtCourse = tgtCourse - 360;
                    }
                    else if (tgtCourse - lastCourse < -180)
                    {
                      tgtCourse = 360 + tgtCourse;
                    }
                  }
                  lastCourse = tgtCourse;

                  // trim to +/- domain if we're flipping axes
                  if (flipAxes && tgtCourse > 180)
                  {
                    tgtCourse -= 360;
                  }

                  // we use the raw color for infills, to help find which
                  // infill we're referring to (esp in random infills)
                  final Color courseColor;
                  final Color speedColor;
                  if (isInfill)
                  {
                    courseColor = fw.getColor();
                    speedColor = fw.getColor();
                  }
                  else
                  {
                    courseColor = fw.getColor().brighter();
                    speedColor = fw.getColor().darker();
                  }

                  final ColouredDataItem crseBearingItem =
                      new ColouredDataItem(thisMilli, tgtCourse, courseColor,
                          isInfill, null, true, true);
                  tgtCourseValues.addOrUpdate(crseBearingItem);
                  final ColouredDataItem tgtSpeedItem =
                      new ColouredDataItem(thisMilli, tgtSpeed, speedColor,
                          isInfill, null, true, true);
                  tgtSpeedValues.addOrUpdate(tgtSpeedItem);
                }
              }
            }
          }
        }
      }

      // sort out the sensor cuts (all of them, not just those when we have target legs)
      final TimePeriod sensorPeriod;
      if (_secondaryTrack != null)
      {
        sensorPeriod =
            new TimePeriod.BaseTimePeriod(_secondaryTrack.getStartDTG(),
                _secondaryTrack.getEndDTG());
      }
      else
      {
        sensorPeriod = null;
      }
      final List<SensorContactWrapper> theBearings =
          getBearings(_primaryTrack, onlyVis, sensorPeriod);
      for (final SensorContactWrapper cut : theBearings)
      {
        double theBearing;

        // ensure it's in the positive domain
        if (cut.getBearing() < 0)
        {
          theBearing = cut.getBearing() + 360;
        }
        else
        {
          theBearing = cut.getBearing();
        }

        // put in the correct domain, if necessary
        if (flipAxes)
        {
          if (theBearing > 180d)
          {
            theBearing -= 360d;
          }
        }
        else
        {
          if (theBearing < 0)
          {
            theBearing += 360;
          }
        }

        // ok, store it.
allCuts.addOrUpdate(new TimeSeriesDataItem(new FixedMillisecond(cut .getDTG().getDate().getTime()), theBearing)); } // ok, add these new series if (errorValues.getItemCount() > 0) { errorSeries.addSeries(errorValues); } if (ambigErrorValues.getItemCount() > 0) { errorSeries.addSeries(ambigErrorValues); } actualSeries.addSeries(measuredValues); if (ambigValues.getItemCount() > 0) { actualSeries.addSeries(ambigValues); } if (calculatedValues.getItemCount() > 0) { actualSeries.addSeries(calculatedValues); } if (tgtCourseValues.getItemCount() > 0) { targetCourseSeries.addSeries(tgtCourseValues); // ok, sort out the renderer overviewCourseRenderer.setLightweightMode(tgtCourseValues .getItemCount() > MAX_ITEMS_TO_PLOT); } if (tgtSpeedValues.getItemCount() > 0) { targetSpeedSeries.addSeries(tgtSpeedValues); overviewSpeedRenderer .setLightweightMode(tgtSpeedValues.getItemCount() > MAX_ITEMS_TO_PLOT); } if (showCourse) { targetCourseSeries.addSeries(osCourseValues); } if (calculatedValues.getItemCount() > 0) { targetCalculatedSeries.addAndOrUpdate(calculatedValues); } // and the course data for the zone chart if (!osCourseValues.isEmpty()) { if (ownshipCourseSeries != null) { // is it currently empty? if (ownshipCourseSeries.isEmpty()) { ownshipCourseSeries.addAndOrUpdate(osCourseValues); } else { // ok, ignore it. we only assign the data in the first pass } } } // and the bearing data for the zone chart if (!allCuts.isEmpty()) { if (targetBearingSeries != null) { // is it currently empty? if (targetBearingSeries.isEmpty()) { targetBearingSeries.addAndOrUpdate(allCuts); } else { // ok, ignore it. 
// we only assign the data in the first pass
        }
      }

      // find the color for maximum value in the error series, if we have error data
      if (errorSeries.getSeriesCount() > 0)
      {
        // retrieve the cut-off value (degrees) from the application preferences
        final double cutOffValue;
        final String prefValue =
            Application.getThisProperty(RelativeTMASegment.CUT_OFF_VALUE_DEGS);
        // NOTE(review): Double.valueOf(prefValue) never returns null - it either
        // parses or throws NumberFormatException, so the third clause is always
        // true, and a malformed preference value will throw here. Confirm whether
        // a try/catch with fallback to the default was intended.
        if (prefValue != null && prefValue.length() > 0
            && Double.valueOf(prefValue) != null)
        {
          cutOffValue = Double.valueOf(prefValue);
        }
        else
        {
          // no usable preference - fall back to a 3 degree cut-off
          cutOffValue = 3d;
        }
        final Paint errorColor = calculateErrorShadeFor(errorSeries, cutOffValue);
        dotPlot.setBackgroundPaint(errorColor);
      }

      // push the collated datasets into their respective plots
      dotPlot.setDataset(errorSeries);
      linePlot.setDataset(actualSeries);
      targetPlot.setDataset(0, targetCourseSeries);
      targetPlot.setDataset(1, targetSpeedSeries);
    }
    finally
    {
      // we're done - switch chart change notifications back on
      for (final Series series : sList)
      {
        series.setNotify(true);
      }
      // and for the series collections too
      for (final TimeSeriesCollection series : tList)
      {
        series.setNotify(true);
      }
    }
  }

  /**
   * go through the tracks, finding the relevant position on the other track.
   * Refreshes {@code _primaryDoublets} from the current primary/secondary
   * tracks; no-op when there is no primary track.
   *
   * @param onlyVis     only consider visible sensor cuts
   * @param needBearing only produce doublets that carry bearing data
   * @param needFreq    only produce doublets that carry frequency data
   */
  private void updateDoublets(final boolean onlyVis, final boolean needBearing,
      final boolean needFreq)
  {
    // ok - we're now there
    // so, do we have primary and secondary tracks?
    if (_primaryTrack != null)
    {
      // cool sort out the list of sensor locations for these tracks
      _primaryDoublets = getDoublets(_primaryTrack, _secondaryTrack, onlyVis,
          needBearing, needFreq);
    }
  }

  /**
   * ok, our track has been dragged, calculate the new series of frequency
   * offsets and push them into the error (dot) and actual (line) plots.
   *
   * @param dotPlot        plot receiving the frequency-error series
   * @param linePlot       plot receiving measured/predicted/base frequencies
   * @param tracks         provider used to (re-)initialise the track wrappers
   * @param onlyVis        only consider visible sensor cuts
   * @param holder         parent widget, used when initialising
   * @param logger         error sink used when initialising
   * @param updateDoublets whether the doublet list should be recalculated first
   */
  public void updateFrequencyData(final XYPlot dotPlot, final XYPlot linePlot,
      final TrackDataProvider tracks, final boolean onlyVis,
      final Composite holder, final ErrorLogger logger,
      final boolean updateDoublets)
  {
    // do we have anything?
    if (_primaryTrack == null)
    {
      return;
    }

    // ok, find the track wrappers
    if (_secondaryTrack == null)
    {
      initialise(tracks, false, onlyVis, holder, logger, "Frequency", false,
          true);
    }

    // ok - the tracks have moved. better update the doublets
    if (updateDoublets)
    {
      updateDoublets(onlyVis, false, true);
    }

    // aah - but what if we've ditched our doublets?
    if ((_primaryDoublets == null) || (_primaryDoublets.size() == 0))
    {
      // better clear the plot
      dotPlot.setDataset(null);
      linePlot.setDataset(null);
      return;
    }

    // create the collection of series
    final TimeSeriesCollection errorSeries = new TimeSeriesCollection();
    final TimeSeriesCollection actualSeries = new TimeSeriesCollection();

    // NOTE(review): _primaryTrack was already null-checked at the top of this
    // method; this repeat check is unreachable unless another thread mutates it.
    if (_primaryTrack == null)
    {
      return;
    }

    // produce a dataset for each track
    final TimeSeries errorValues = new TimeSeries(_primaryTrack.getName());
    final TimeSeries measuredValues = new TimeSeries(MEASURED_DATASET);
    // final TimeSeries correctedValues = new TimeSeries("Corrected");
    final TimeSeries predictedValues = new TimeSeries("Predicted");
    final TimeSeries baseValues = new TimeSeries("Base");

    // ok, run through the points on the primary track
    final Iterator<Doublet> iter = _primaryDoublets.iterator();

    // remember the last sensor, so we can spot sensor changes below
    SensorWrapper lastSensor = null;

    // sort out the speed of sound (kts) - preference value, else library default
    final String speedStr = CorePlugin.getDefault().getPreferenceStore()
        .getString(FrequencyCalcs.SPEED_OF_SOUND_KTS_PROPERTY);
    final double speedOfSound;
    if (speedStr != null && speedStr.length() > 0)
    {
      speedOfSound = Double.parseDouble(speedStr);
    }
    else
    {
      speedOfSound = FrequencyCalcs.SpeedOfSoundKts;
    }

    while (iter.hasNext())
    {
      final Doublet thisD = iter.next();
      try
      {
        final Color thisColor = thisD.getColor();
        final double measuredFreq = thisD.getMeasuredFrequency();
        final HiResDate currentTime = thisD.getDTG();
        final FixedMillisecond thisMilli = new FixedMillisecond(currentTime
            .getDate().getTime());

        final ColouredDataItem mFreq = new ColouredDataItem(thisMilli,
            measuredFreq, thisColor, false, null, true, true, thisD
                .getSensorCut());
        // final ColouredDataItem corrFreq = new ColouredDataItem(
        // new FixedMillisecond(currentTime.getDate().getTime()),
        // correctedFreq, thisColor, false, null);
        measuredValues.addOrUpdate(mFreq);

        final double baseFreq = thisD.getBaseFrequency();
        if (!Double.isNaN(baseFreq))
        {
          // have we changed sensor?
          final SensorWrapper thisSensor = thisD.getSensorCut().getSensor();
          final boolean newSensor;
          if (thisSensor != null && !thisSensor.equals(lastSensor))
          {
            newSensor = true;
            lastSensor = thisSensor;
          }
          else
          {
            newSensor = false;
          }

          // plot the base frequency; break the line when the sensor changes
          final ColouredDataItem bFreq = new ColouredDataItem(thisMilli,
              baseFreq, thisColor.darker(), !newSensor, null, true, true);
          baseValues.addOrUpdate(bFreq);

          // do we have target data?
          if (thisD.getTarget() != null)
          {
            final Color calcColor = thisD.getTarget().getColor();

            // did we get a base frequency? We may have a track
            // with a section of data that doesn't have frequency, you see.
            final double predictedFreq = thisD.getPredictedFrequency(
                speedOfSound);
            final double thisError = thisD.calculateFreqError(measuredFreq,
                predictedFreq);
            final ColouredDataItem pFreq = new ColouredDataItem(thisMilli,
                predictedFreq, calcColor, true, null, true, true, thisD
                    .getTarget());
            final ColouredDataItem eFreq = new ColouredDataItem(thisMilli,
                thisError, thisColor, false, null, true, true);
            predictedValues.addOrUpdate(pFreq);
            errorValues.addOrUpdate(eFreq);
          } // if we have a target
        } // if we have a base frequency
      }
      catch (final SeriesException e)
      {
        // duplicate/out-of-order timestamps - log and keep going
        CorePlugin.logError(IStatus.INFO,
            "some kind of trip whilst updating frequency plot", e);
      }
    }

    // ok, add these new series
    if (errorValues.getItemCount() > 0)
    {
      errorSeries.addSeries(errorValues);
    }

    // find the color for maximum value in the error series, if we have error data
    if (errorSeries.getSeriesCount() > 0)
    {
      final double cutOffValue;
      // retrieve the cut-off value
      final String prefValue = Application.getThisProperty(
          RelativeTMASegment.CUT_OFF_VALUE_HZ);
      // NOTE(review): as above, Double.valueOf(...) != null is always true and
      // throws on malformed input. Also confirm the /100d scaling - presumably
      // the preference is stored in hundredths of a Hz.
      if (prefValue != null && prefValue.length() > 0
          && Double.valueOf(prefValue) != null)
      {
        cutOffValue = Double.valueOf(prefValue) / 100d;
      }
      else
      {
        cutOffValue = 1d;
      }
      final Paint errorColor = calculateErrorShadeFor(errorSeries, cutOffValue);
      dotPlot.setBackgroundPaint(errorColor);
    }

    actualSeries.addSeries(measuredValues);
    // actualSeries.addSeries(correctedValues);

    if (predictedValues.getItemCount() > 0)
    {
      actualSeries.addSeries(predictedValues);
    }

    if (baseValues.getItemCount() > 0)
    {
      actualSeries.addSeries(baseValues);

      // sort out the rendering for the BaseFrequencies.
      // we want to show a solid line, with no markers
      // (series index 2 = the "Base" series added above)
      final int BaseFreqSeries = 2;
      final ColourStandardXYItemRenderer lineRend =
          (ColourStandardXYItemRenderer) linePlot.getRenderer();
      lineRend.setSeriesShape(BaseFreqSeries, ShapeUtilities.createDiamond(
          0.2f));
      lineRend.setSeriesStroke(BaseFreqSeries, new BasicStroke(4));
      lineRend.setSeriesShapesVisible(BaseFreqSeries, false);
      lineRend.setSeriesShapesFilled(BaseFreqSeries, false);
    }

    dotPlot.setDataset(errorSeries);
    linePlot.setDataset(actualSeries);
  }
}
package org.spoofax.jsglr2.imploder;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.metaborg.parsetable.productions.IProduction;
import org.metaborg.parsetable.symbols.IMetaVarSymbol;
import org.spoofax.jsglr.client.imploder.IToken;
import org.spoofax.jsglr2.imploder.treefactory.ITokenizedTreeFactory;
import org.spoofax.jsglr2.messages.Message;
import org.spoofax.jsglr2.parseforest.IDerivation;
import org.spoofax.jsglr2.parseforest.IParseForest;
import org.spoofax.jsglr2.parseforest.IParseNode;
import org.spoofax.jsglr2.parser.Position;
import org.spoofax.jsglr2.recovery.RecoveryMessages;
import org.spoofax.jsglr2.tokens.Tokens;

/**
 * Implodes a parse forest into an abstract syntax tree while simultaneously
 * building a {@link Tokens} token stream and binding tokens to the trees they
 * belong to. Subclasses supply the concrete tree type via the
 * {@link ITokenizedTreeFactory} and the token-to-tree binding.
 */
public abstract class TokenizedTreeImploder
//@formatter:off
   <ParseForest extends IParseForest,
    ParseNode   extends IParseNode<ParseForest, Derivation>,
    Derivation  extends IDerivation<ParseForest>,
    Tree>
//@formatter:on
    extends
    AbstractTreeImploder<ParseForest, ParseNode, Derivation, Tokens, Void, Tree, ImplodeResult<Tokens, Void, Tree>> {

    protected final ITokenizedTreeFactory<Tree> treeFactory;

    public TokenizedTreeImploder(ITokenizedTreeFactory<Tree> treeFactory) {
        this.treeFactory = treeFactory;
    }

    /**
     * Implodes the given parse forest: creates start/end tokens, recursively
     * implodes the top parse node, and binds the start/end tokens to the
     * resulting root tree. {@code resultCache} is unused in this implementation.
     */
    @Override public ImplodeResult<Tokens, Void, Tree> implode(String input, String fileName, ParseForest parseForest,
        Void resultCache) {
        @SuppressWarnings("unchecked") ParseNode topParseNode = (ParseNode) parseForest;

        Collection<Message> messages = new ArrayList<>();

        Tokens tokens = new Tokens(input, fileName);
        tokens.makeStartToken();

        Position position = Position.START_POSITION;

        SubTree<Tree> tree = implodeParseNode(topParseNode, messages, tokens, position, tokens.startToken());

        tokens.makeEndToken(tree.endPosition);

        tokenTreeBinding(tokens.startToken(), tree.tree);
        tokenTreeBinding(tokens.endToken(), tree.tree);

        return new ImplodeResult<>(tokens, null, tree.tree, messages);
    }

    /** Mutable result of imploding one parse node: the tree plus its span. */
    static class SubTree<Tree> {
        Tree tree;
        Position endPosition;
        IToken leftToken, rightToken;

        SubTree(Tree tree, Position endPosition, IToken leftToken, IToken rightToken) {
            this.tree = tree;
            this.endPosition = endPosition;
            this.leftToken = leftToken;
            this.rightToken = rightToken;
        }
    }

    /**
     * Implodes a single parse node. Context-free nodes are imploded via their
     * (disambiguated) derivations - producing an ambiguity node when more than
     * one derivation survives filtering - while lexical/layout/literal nodes
     * become a single token (layout/literal trees are {@code null}).
     */
    protected SubTree<Tree> implodeParseNode(ParseNode parseNode, Collection<Message> messages, Tokens tokens,
        Position startPosition, IToken parentLeftToken) {
        parseNode = implodeInjection(parseNode);

        IProduction production = parseNode.production();

        if(production.isContextFree() && !production.isSkippableInParseForest()) {
            List<Derivation> filteredDerivations = applyDisambiguationFilters(parseNode);

            if(filteredDerivations.size() > 1) {
                // ambiguity: implode every surviving derivation and wrap in an amb node
                List<Tree> trees = new ArrayList<>(filteredDerivations.size());
                SubTree<Tree> result = null;

                if(production.isList()) {
                    for(List<ParseForest> derivationParseForests : implodeAmbiguousLists(filteredDerivations)) {
                        if(result == null) {
                            // first alternative also provides the span/tokens for the amb node
                            result = implodeListDerivation(messages, tokens, production, derivationParseForests,
                                startPosition, parentLeftToken);

                            trees.add(result.tree);
                        } else
                            trees.add(implodeListDerivation(messages, tokens, production, derivationParseForests,
                                startPosition, parentLeftToken).tree);
                    }
                } else {
                    for(Derivation derivation : filteredDerivations) {
                        if(result == null) {
                            result = implodeDerivation(messages, tokens, derivation, startPosition, parentLeftToken);

                            trees.add(result.tree);
                        } else
                            trees.add(
                                implodeDerivation(messages, tokens, derivation, startPosition, parentLeftToken).tree);
                    }
                }

                result.tree = treeFactory.createAmb(trees, result.leftToken, result.rightToken);

                return result;
            } else
                return implodeDerivation(messages, tokens, filteredDerivations.get(0), startPosition, parentLeftToken);
        } else {
            // non-context-free (lexical/layout/literal) node: emit one token for its span
            int width = parseNode.width();

            Position endPosition = startPosition.step(tokens.getInput(), width);

            if(production.isRecovery())
                messages.add(RecoveryMessages.get(production, startPosition, endPosition));

            // zero-width nodes (e.g. empty lexicals) get no token
            IToken token = width > 0 ? tokens.makeToken(startPosition, endPosition, production) : null;

            Tree tree;

            if(production.isLayout() || production.isLiteral()) {
                tree = null;
            } else if(production.isLexical()) {
                tree = createLexicalTerm(production, tokens.toString(startPosition.offset, endPosition.offset), token);
            } else {
                throw new RuntimeException("invalid term type");
            }

            return new SubTree<>(tree, endPosition, token, token);
        }
    }

    /**
     * Implodes one context-free derivation into a tree, binding any tokens of
     * tree-less children (layout/literals) to the resulting parent tree.
     */
    protected SubTree<Tree> implodeDerivation(Collection<Message> messages, Tokens tokens, Derivation derivation,
        Position startPosition, IToken parentLeftToken) {
        IProduction production = derivation.production();

        if(!production.isContextFree())
            throw new RuntimeException("non context free imploding not supported");

        List<Tree> childASTs = new ArrayList<>();
        List<IToken> unboundTokens = new ArrayList<>();

        SubTree<Tree> subTree = implodeChildParseNodes(messages, tokens, childASTs,
            Arrays.asList(derivation.parseForests()), derivation.production(), unboundTokens, startPosition,
            parentLeftToken);

        subTree.tree = createContextFreeTerm(derivation.production(), childASTs, subTree.leftToken, subTree.rightToken);

        if(production.isRecovery())
            messages.add(RecoveryMessages.get(production, startPosition, subTree.endPosition));

        for(IToken token : unboundTokens)
            tokenTreeBinding(token, subTree.tree);

        return subTree;
    }

    /**
     * Implodes one alternative of an ambiguous list (a pre-flattened child
     * sequence) into a list tree; see {@link #implodeDerivation} for the
     * token-binding behaviour.
     */
    protected SubTree<Tree> implodeListDerivation(Collection<Message> messages, Tokens tokens, IProduction production,
        List<ParseForest> childParseForests, Position startPosition, IToken parentLeftToken) {
        List<Tree> childASTs = new ArrayList<>();
        List<IToken> unboundTokens = new ArrayList<>();

        SubTree<Tree> subTree = implodeChildParseNodes(messages, tokens, childASTs, childParseForests, production,
            unboundTokens, startPosition, parentLeftToken);

        subTree.tree = createContextFreeTerm(production, childASTs, subTree.leftToken, subTree.rightToken);

        for(IToken token : unboundTokens)
            tokenTreeBinding(token, subTree.tree);

        return subTree;
    }

    /**
     * Implodes the children of a derivation in order, accumulating their trees
     * into {@code childASTs} and their unbound tokens into
     * {@code unboundTokens}. Nested list productions are flattened by recursing
     * into them with the same accumulator lists. The returned SubTree carries
     * the overall span and the outermost non-layout left/right tokens; its
     * {@code tree} is left {@code null} for the caller to fill in.
     */
    protected SubTree<Tree> implodeChildParseNodes(Collection<Message> messages, Tokens tokens, List<Tree> childASTs,
        Iterable<ParseForest> childParseForests, IProduction production, List<IToken> unboundTokens,
        Position startPosition, IToken parentLeftToken) {
        SubTree<Tree> result = new SubTree<>(null, startPosition, parentLeftToken, null);

        // pivot = the position/token reached after the previously imploded child
        Position pivotPosition = startPosition;
        IToken pivotToken = parentLeftToken;

        for(ParseForest childParseForest : childParseForests) {
            @SuppressWarnings("unchecked") ParseNode childParseNode = (ParseNode) childParseForest;

            IProduction childProduction = childParseNode.production();

            SubTree<Tree> subTree;

            if(production.isList() && (
            //@formatter:off
                // Constraints for flattening nested lists productions:
                childProduction.isList() &&                                     // The subtree is a list
                childProduction.constructor() == null &&                        // The subtree has no constructor
                childParseNode.getPreferredAvoidedDerivations().size() <= 1 &&  // The subtree is not ambiguous
                !production.isLexical()                                         // Not in lexical context; otherwise just implode as lexical token
            //@formatter:on
            )) {
                // Make sure lists are flattened
                subTree = implodeChildParseNodes(messages, tokens, childASTs,
                    Arrays.asList(childParseNode.getFirstDerivation().parseForests()), childProduction, unboundTokens,
                    pivotPosition, pivotToken);
            } else {
                subTree = implodeParseNode(childParseNode, messages, tokens, pivotPosition, pivotToken);

                if(subTree.tree != null)
                    childASTs.add(subTree.tree);

                // Collect tokens that are not bound to a tree such that they can later be bound to the resulting
                // parent tree
                if(subTree.tree == null) {
                    if(subTree.leftToken != null)
                        unboundTokens.add(subTree.leftToken);

                    // Make sure that if subTree.leftToken == subTree.rightToken it is not considered twice
                    if(subTree.rightToken != null && subTree.rightToken != subTree.leftToken)
                        unboundTokens.add(subTree.rightToken);
                }
            }

            // Set the parent tree left and right token from the outermost non-layout left and right child tokens
            if(!childProduction.isLayout()) {
                if(result.leftToken == null)
                    result.leftToken = subTree.leftToken;

                if(subTree.rightToken != null) {
                    result.rightToken = subTree.rightToken;
                    pivotToken = subTree.rightToken;
                }
            }

            pivotPosition = subTree.endPosition;
        }

        result.endPosition = pivotPosition;

        return result;
    }

    /**
     * Builds the tree for a context-free production from its imploded children,
     * dispatching on the production kind (constructor / optional / list /
     * injection / tuple).
     */
    protected Tree createContextFreeTerm(IProduction production, List<Tree> childASTs, IToken leftToken,
        IToken rightToken) {
        String constructor = production.constructor();

        if(constructor != null)
            return treeFactory.createNonTerminal(production.lhs(), constructor, childASTs, leftToken, rightToken);
        else if(production.isOptional())
            return treeFactory.createOptional(production.lhs(), childASTs, leftToken, rightToken);
        else if(production.isList())
            return treeFactory.createList(childASTs, leftToken, rightToken);
        else if(childASTs.size() == 1)
            return treeFactory.createInjection(production.lhs(), childASTs.get(0), production.isBracket());
        else
            return treeFactory.createTuple(childASTs, leftToken, rightToken);
    }

    /**
     * Builds the tree for a lexical production (a meta-variable or a plain
     * string terminal) and binds its token, when present, to the tree.
     */
    protected Tree createLexicalTerm(IProduction production, String lexicalString, IToken lexicalToken) {
        Tree lexicalTerm;

        if(production.lhs() instanceof IMetaVarSymbol)
            lexicalTerm = treeFactory.createMetaVar((IMetaVarSymbol) production.lhs(), lexicalString, lexicalToken);
        else
            lexicalTerm = treeFactory.createStringTerminal(production.lhs(), lexicalString, lexicalToken);

        if(lexicalToken != null) // Can be null, e.g. for empty string lexicals
            tokenTreeBinding(lexicalToken, lexicalTerm);

        return lexicalTerm;
    }

    /** Associates a token with the tree it belongs to (subclass-specific). */
    protected abstract void tokenTreeBinding(IToken token, Tree tree);

}
package vg.civcraft.mc.civmodcore.world.locations.chunkmeta;

import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.function.Supplier;
import java.util.logging.Level;

import org.bukkit.World;

import vg.civcraft.mc.civmodcore.CivModCorePlugin;
import vg.civcraft.mc.civmodcore.world.locations.chunkmeta.api.ChunkMetaViewTracker;

/**
 * Per-chunk container for the {@link ChunkMeta} instances of every plugin,
 * keyed by plugin id. Tracks Minecraft load/unload times and supports blocking
 * until all plugin data has been loaded (see {@link #getMeta(short, boolean)}
 * and {@link #loadAll()}, which use this instance's monitor for wait/notify).
 */
public class ChunkCoord extends XZWCoord {

	/**
	 * When was this chunk last loaded in Minecraft as UNIX timestamp
	 */
	private long lastLoadingTime;
	/**
	 * When was this chunk last unloaded in Minecraft as UNIX timestamp
	 */
	private long lastUnloadingTime;
	/**
	 * Each ChunkMeta belongs to one plugin, they are identified by the plugin id
	 */
	private Map<Short, ChunkMeta<?>> chunkMetas;
	/**
	 * Set to true once all data has been loaded for this chunk and stays true for
	 * the entire life time of this object
	 */
	private boolean isFullyLoaded;
	private World world;

	ChunkCoord(int x, int z, short worldID, World world) {
		super(x, z, worldID);
		this.world = world;
		this.chunkMetas = new TreeMap<>();
		this.isFullyLoaded = false;
		// -1 marks "never loaded/unloaded yet"
		this.lastLoadingTime = -1;
		this.lastUnloadingTime = -1;
	}

	/**
	 * @return World this instance is in
	 */
	public World getWorld() {
		return world;
	}

	// Registers the given meta for its owning plugin, tying it to this world
	void addChunkMeta(ChunkMeta<?> chunkMeta) {
		chunkMeta.setWorld(this.world);
		chunkMetas.put(chunkMeta.getPluginID(), chunkMeta);
	}

	/**
	 * Writes all data held by this instance to the database
	 */
	void fullyPersist() {
		for (ChunkMeta<?> chunkMeta : chunkMetas.values()) {
			persistChunkMeta(chunkMeta);
		}
	}

	/**
	 * Writes all data held by this instance for one specific plugin to the database
	 * 
	 * @param id Internal id of the plugin to save data for
	 */
	void persistPlugin(short id) {
		ChunkMeta<?> chunkMeta = chunkMetas.get(id);
		if (chunkMeta != null) {
			persistChunkMeta(chunkMeta);
		}
	}

	// Flushes a single meta to the database according to its dirty state,
	// then marks it clean. NORMAL metas are untouched.
	private static void persistChunkMeta(ChunkMeta<?> chunkMeta) {
		switch (chunkMeta.getCacheState()) {
		case NORMAL:
			break;
		case MODIFIED:
			chunkMeta.update();
			break;
		case NEW:
			chunkMeta.insert();
			break;
		case DELETED:
			chunkMeta.delete();
		}
		// NOTE(review): deleted metas are also reset to NORMAL here - confirm
		// they are removed from the map elsewhere, otherwise they linger
		chunkMeta.setCacheState(CacheState.NORMAL);
	}

	/**
	 * Forget all data which is not supposed to be held in memory permanently
	 */
	void deleteNonPersistentData() {
		Iterator<Entry<Short, ChunkMeta<?>>> iter = chunkMetas.entrySet().iterator();
		while (iter.hasNext()) {
			ChunkMeta<?> meta = iter.next().getValue();
			if (!meta.loadAlways()) {
				iter.remove();
			}
		}
	}

	/**
	 * @return When was the minecraft chunk (the block data) this object is tied
	 *         last loaded (UNIX timestamp)
	 */
	long getLastMCLoadingTime() {
		return lastLoadingTime;
	}

	/**
	 * @return When was the minecraft chunk (the block data) this object is tied
	 *         last unloaded (UNIX timestamp)
	 */
	long getLastMCUnloadingTime() {
		return lastUnloadingTime;
	}

	// Returns the meta for the given plugin; unless the data is flagged
	// always-loaded, blocks on this instance's monitor until loadAll() has
	// finished populating the map
	ChunkMeta<?> getMeta(short pluginID, boolean alwaysLoaded) {
		if (!alwaysLoaded && !isFullyLoaded) {
			// check before taking monitor. This is fine, because the loaded flag will never
			// switch from true to false,
			// only the other way around
			synchronized (this) {
				while (!isFullyLoaded) {
					try {
						wait();
					} catch (InterruptedException e) {
						// whatever
						e.printStackTrace();
					}
				}
			}
		}
		return chunkMetas.get(pluginID);
	}

	// True if any plugin's meta for this chunk must stay in memory permanently
	boolean hasPermanentlyLoadedData() {
		for (ChunkMeta<?> meta : chunkMetas.values()) {
			if (meta.loadAlways()) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Loads data for all plugins for this chunk
	 */
	void loadAll() {
		synchronized (this) {
			if (isFullyLoaded) {
				return;
			}
			for (Entry<Short, Supplier<ChunkMeta<?>>> generator : ChunkMetaFactory.getInstance()
					.getEmptyChunkFunctions()) {
				ChunkMeta<?> chunk = generator.getValue().get();
				chunk.setChunkCoord(this);
				short pluginID = generator.getKey();
				chunk.setPluginID(pluginID);
				try {
					chunk.populate();
				} catch (Throwable e) {
					// need to catch everything here, otherwise we block the main thread forever
					// once it tries to read this
					CivModCorePlugin.getInstance().getLogger().log(Level.SEVERE, "Failed to load chunk data", e);
				}
				ChunkMetaViewTracker.getInstance().get(pluginID).postLoad(chunk);
				addChunkMeta(chunk);
			}
			isFullyLoaded = true;
			// wake every getMeta() caller parked in wait()
			this.notifyAll();
		}
	}

	/**
	 * Called when the minecraft chunk (the block data) this object is tied to gets
	 * loaded
	 */
	void minecraftChunkLoaded() {
		boolean hasBeenLoadedBefore = this.lastLoadingTime != -1;
		this.lastLoadingTime = System.currentTimeMillis();
		if (hasBeenLoadedBefore) {
			// cached metas are being reused for a freshly (re)loaded chunk
			for (ChunkMeta<?> meta : chunkMetas.values()) {
				meta.handleChunkCacheReuse();
			}
		}
	}

	// Loaded iff the most recent event was a load (or it was loaded and never unloaded)
	public boolean isChunkLoaded() {
		if (this.lastUnloadingTime > 0) {
			return this.lastUnloadingTime < this.lastLoadingTime;
		} else {
			return this.lastLoadingTime > 0;
		}
	}

	/**
	 * Called when the minecraft chunk (the block data) this object is tied to gets
	 * unloaded
	 */
	void minecraftChunkUnloaded() {
		this.lastUnloadingTime = System.currentTimeMillis();
		for (ChunkMeta<?> meta : chunkMetas.values()) {
			meta.handleChunkUnload();
		}
	}
}
package com.intellij.codeInsight.documentation; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.TargetElementUtil; import com.intellij.codeInsight.documentation.actions.ShowQuickDocInfoAction; import com.intellij.codeInsight.hint.HintManagerImpl; import com.intellij.codeInsight.hint.ParameterInfoController; import com.intellij.codeInsight.lookup.Lookup; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupEx; import com.intellij.codeInsight.lookup.LookupManager; import com.intellij.ide.BrowserUtil; import com.intellij.ide.actions.BaseNavigateToSourceAction; import com.intellij.ide.highlighter.ArchiveFileType; import com.intellij.ide.util.PropertiesComponent; import com.intellij.ide.util.gotoByName.ChooseByNameBase; import com.intellij.lang.Language; import com.intellij.lang.LanguageDocumentation; import com.intellij.lang.documentation.*; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.AnActionListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.PlainTextFileType; import com.intellij.openapi.fileTypes.UnknownFileType; import com.intellij.openapi.preview.PreviewManager; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.OrderEntry; import com.intellij.openapi.roots.libraries.LibraryUtil; import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService; import com.intellij.openapi.ui.popup.JBPopup; 
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.ToolWindowEx;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.psi.*;
import com.intellij.psi.presentation.java.SymbolPresentationUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.ScrollingUtil;
import com.intellij.ui.content.Content;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.ui.popup.PopupUpdateProcessor;
import com.intellij.util.Alarm;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.DateFormatUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.*;
import java.util.List;

import static com.intellij.openapi.wm.IdeFocusManager.getGlobalInstance;

/**
 * Shows quick documentation for PSI elements, either as a popup hint or docked
 * in the documentation tool window (see {@link DockablePopupManager}).
 */
public class DocumentationManager extends DockablePopupManager<DocumentationComponent> {
  // dimension-service keys for persisting popup location/size
  @NonNls public static final String JAVADOC_LOCATION_AND_SIZE = "javadoc.popup";
  @NonNls public static final String NEW_JAVADOC_LOCATION_AND_SIZE = "javadoc.popup.new";
  public static final DataKey<String> SELECTED_QUICK_DOC_TEXT = DataKey.create("QUICK_DOC.SELECTED_TEXT");

  private static final Logger LOG = Logger.getInstance(DocumentationManager.class);
  // PropertiesComponent keys controlling tool-window vs popup behaviour
  private static final String SHOW_DOCUMENTATION_IN_TOOL_WINDOW = "ShowDocumentationInToolWindow";
  private static final String DOCUMENTATION_AUTO_UPDATE_ENABLED = "DocumentationAutoUpdateEnabled";

  // limits for background documentation generation
  private static final long DOC_GENERATION_TIMEOUT_MILLISECONDS = 60000;
  private static final long DOC_GENERATION_PAUSE_MILLISECONDS = 100;

  private Editor myEditor;
  private final Alarm myUpdateDocAlarm;
  private WeakReference<JBPopup> myDocInfoHintRef;
  // component focused before the popup was shown; restored when the hint closes
  private Component myPreviouslyFocused;
  public static final Key<SmartPsiElementPointer> ORIGINAL_ELEMENT_KEY = Key.create("Original element");

  private final ActionManager myActionManager;
  private final TargetElementUtil myTargetElementUtil;

  private boolean myCloseOnSneeze;

  private String myPrecalculatedDocumentation;

  private ActionCallback myLastAction;
  private DocumentationComponent myTestDocumentationComponent;

  private AnAction myRestorePopupAction;

  @Override
  protected String getToolwindowId() {
    return ToolWindowId.DOCUMENTATION;
  }

  @Override
  protected DocumentationComponent createComponent() {
    return new DocumentationComponent(this);
  }

  @Override
  protected String getRestorePopupDescription() {
    return "Restore popup view mode";
  }

  @Override
  protected String getAutoUpdateDescription() {
    return "Refresh documentation on selection change automatically";
  }

  @Override
  protected String getAutoUpdateTitle() {
    return "Auto-update from Source";
  }

  @Override
  protected boolean getAutoUpdateDefault() {
    return true;
  }

  @NotNull
  @Override
  protected AnAction createRestorePopupAction() {
    // remember the action so createToolWindow() can attach the quick-doc shortcut to it
    myRestorePopupAction = super.createRestorePopupAction();
    return myRestorePopupAction;
  }

  @Override
  public void restorePopupBehavior() {
    // give focus back to where it was before the docked view took over
    if (myPreviouslyFocused != null) {
      IdeFocusManager.getInstance(myProject).requestFocus(myPreviouslyFocused, true);
    }

    super.restorePopupBehavior();
    updateComponent(true);
  }

  @Override
  public void createToolWindow(PsiElement element, PsiElement originalElement) {
    super.createToolWindow(element, originalElement);

    if (myToolWindow != null) {
      myToolWindow.getComponent().putClientProperty(ChooseByNameBase.TEMPORARILY_FOCUSABLE_COMPONENT_KEY, Boolean.TRUE);

      if (myRestorePopupAction != null) {
        // bind the quick-doc shortcut to "restore popup" inside the tool window, one time only
        ShortcutSet quickDocShortcut = ActionManager.getInstance().getAction(IdeActions.ACTION_QUICK_JAVADOC).getShortcutSet();
        myRestorePopupAction.registerCustomShortcutSet(quickDocShortcut, myToolWindow.getComponent());
        myRestorePopupAction = null;
      }
    }
  }

  /**
   * @return {@code true} if quick doc control is configured to not prevent user-IDE interaction (e.g. should be closed if
   * the user presses a key);
   * {@code false} otherwise
   */
  public boolean isCloseOnSneeze() {
    return myCloseOnSneeze;
  }

  @Override
  protected void installComponentActions(ToolWindow toolWindow, DocumentationComponent component) {
    ((ToolWindowEx)toolWindow).setTitleActions(component.getActions());
    DefaultActionGroup group = new DefaultActionGroup(createActions());
    group.add(component.getFontSizeAction());
    ((ToolWindowEx)toolWindow).setAdditionalGearActions(group);
    component.removeCornerMenu();
  }

  @Override
  protected void setToolwindowDefaultState() {
    final Rectangle rectangle = WindowManager.getInstance().getIdeFrame(myProject).suggestChildFrameBounds();
    myToolWindow.setDefaultState(ToolWindowAnchor.RIGHT, ToolWindowType.DOCKED, new Rectangle(rectangle.width / 4, rectangle.height));
    myToolWindow.setType(ToolWindowType.DOCKED, null);
    myToolWindow.setSplitMode(true, null);
    myToolWindow.setAutoHide(false);
  }

  public static DocumentationManager getInstance(Project project) {
    return ServiceManager.getService(project, DocumentationManager.class);
  }

  public DocumentationManager(final Project project, ActionManager manager, TargetElementUtil targetElementUtil) {
    super(project);
    myActionManager = manager;
    // close the doc hint on most actions, except navigation/scrolling ones
    final AnActionListener actionListener = new AnActionListener() {
      @Override
      public void beforeActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event) {
        final JBPopup hint = getDocInfoHint();
        if (hint != null) {
          if (action instanceof ShowQuickDocInfoAction) {
            ((AbstractPopup)hint).focusPreferredComponent();
            return;
          }
          if (action instanceof
HintManagerImpl.ActionToIgnore) return; if (action instanceof ScrollingUtil.ScrollingAction) return; if (action == myActionManager.getAction(IdeActions.ACTION_EDITOR_MOVE_CARET_DOWN)) return; if (action == myActionManager.getAction(IdeActions.ACTION_EDITOR_MOVE_CARET_UP)) return; if (action == myActionManager.getAction(IdeActions.ACTION_EDITOR_MOVE_CARET_PAGE_DOWN)) return; if (action == myActionManager.getAction(IdeActions.ACTION_EDITOR_MOVE_CARET_PAGE_UP)) return; if (action == ActionManager.getInstance().getAction(IdeActions.ACTION_EDITOR_ESCAPE)) return; if (ActionPlaces.JAVADOC_INPLACE_SETTINGS.equals(event.getPlace())) return; if (action instanceof BaseNavigateToSourceAction) return; closeDocHint(); } } @Override public void beforeEditorTyping(char c, DataContext dataContext) { final JBPopup hint = getDocInfoHint(); if (hint != null && LookupManager.getActiveLookup(myEditor) == null) { hint.cancel(); } } }; myActionManager.addAnActionListener(actionListener, project); myUpdateDocAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD,myProject); myTargetElementUtil = targetElementUtil; } private void closeDocHint() { JBPopup hint = getDocInfoHint(); if (hint == null) { return; } myCloseOnSneeze = false; hint.cancel(); Component toFocus = myPreviouslyFocused; hint.cancel(); if (toFocus != null) { IdeFocusManager.getInstance(myProject).requestFocus(toFocus, true); } } public void setAllowContentUpdateFromContext(boolean allow) { if (hasActiveDockedDocWindow()) { restartAutoUpdate(allow); } } public void updateToolwindowContext() { if (hasActiveDockedDocWindow()) { updateComponent(); } } public void showJavaDocInfoAtToolWindow(@NotNull PsiElement element, @NotNull PsiElement original) { final Content content = recreateToolWindow(element, original); if (content == null) return; fetchDocInfo(getDefaultCollector(element, original), (DocumentationComponent)content.getComponent(), true); } public void showJavaDocInfo(@NotNull final PsiElement element, final PsiElement 
original) {
  showJavaDocInfo(element, original, null);
}

/**
 * Asks to show quick doc for the target element.
 *
 * @param editor        editor with an element for which quick doc should be shown
 * @param element       target element which documentation should be shown
 * @param original      element that was used as a quick doc anchor. Example: consider a code like {@code Runnable task;}.
 *                      A user wants to see javadoc for the {@code Runnable}, so, original element is a class name from the variable
 *                      declaration but {@code 'element'} argument is a {@code Runnable} descriptor
 * @param closeCallback callback to be notified on target hint close (if any)
 * @param documentation precalculated documentation
 * @param closeOnSneeze flag that defines whether quick doc control should be as non-obtrusive as possible. E.g. there are at least
 *                      two possible situations - the quick doc is shown automatically on mouse over element; the quick doc is shown
 *                      on explicit action call (Ctrl+Q). We want to close the doc on, say, editor viewport position change
 *                      at the first situation but don't want to do that at the second
 */
public void showJavaDocInfo(@NotNull Editor editor,
                            @NotNull final PsiElement element,
                            @NotNull final PsiElement original,
                            @Nullable Runnable closeCallback,
                            @Nullable String documentation,
                            boolean closeOnSneeze) {
  myEditor = editor;
  myCloseOnSneeze = closeOnSneeze;
  showJavaDocInfo(element, original, false, closeCallback, documentation);
}

public void showJavaDocInfo(@NotNull final PsiElement element, final PsiElement original, @Nullable Runnable closeCallback) {
  showJavaDocInfo(element, original, false, closeCallback);
}

public void showJavaDocInfo(@NotNull final PsiElement element,
                            final PsiElement original,
                            final boolean requestFocus,
                            @Nullable Runnable closeCallback) {
  showJavaDocInfo(element, original, requestFocus, closeCallback, null);
}

public void showJavaDocInfo(@NotNull final PsiElement element,
                            final PsiElement original,
                            final boolean requestFocus,
                            @Nullable Runnable
closeCallback,
                            @Nullable String documentation) {
  if (!element.isValid()) {
    return;
  }

  // Re-resolves the shown element when the active lookup selection changes.
  PopupUpdateProcessor updateProcessor = new PopupUpdateProcessor(element.getProject()) {
    @Override
    public void updatePopup(Object lookupItemObject) {
      if (lookupItemObject instanceof PsiElement) {
        doShowJavaDocInfo((PsiElement)lookupItemObject, requestFocus, this, original, null, null);
      }
    }
  };

  doShowJavaDocInfo(element, requestFocus, updateProcessor, original, closeCallback, documentation);
}

public void showJavaDocInfo(final Editor editor, @Nullable final PsiFile file, boolean requestFocus) {
  showJavaDocInfo(editor, file, requestFocus, null);
}

// Editor-based entry point: derives the documentation target from the caret position,
// an active completion lookup, an argument list, or an enclosing doc comment.
public void showJavaDocInfo(final Editor editor,
                            @Nullable final PsiFile file,
                            boolean requestFocus,
                            @Nullable final Runnable closeCallback) {
  myEditor = editor;
  final Project project = getProject(file);
  PsiDocumentManager.getInstance(project).commitAllDocuments();

  final PsiElement list = ParameterInfoController.findArgumentList(file, editor.getCaretModel().getOffset(), -1);

  PsiElement expressionList = null;
  if (list != null) {
    LookupEx lookup = LookupManager.getInstance(myProject).getActiveLookup();
    if (lookup != null) {
      expressionList = null; // take completion variants for documentation then
    }
    else {
      expressionList = list;
    }
  }

  final PsiElement originalElement = getContextElement(editor, file);
  PsiElement element = assertSameProject(findTargetElement(editor, file));

  if (element == null && expressionList != null) {
    element = expressionList;
  }

  if (element == null && file == null) return; //file == null for text field editor

  if (element == null) { // look if we are within a javadoc comment
    element = assertSameProject(originalElement);
    if (element == null) return;

    PsiComment comment = PsiTreeUtil.getParentOfType(element, PsiComment.class);
    if (comment == null) return;

    element = comment instanceof PsiDocCommentBase ?
((PsiDocCommentBase)comment).getOwner() : comment.getParent();
    if (element == null) return;
    //if (!(element instanceof PsiDocCommentOwner)) return null;
  }

  PsiElement finalElement = element;
  // On lookup selection change: re-resolve the target through the documentation provider;
  // fall back to a "no documentation found" message when nothing can be resolved.
  final PopupUpdateProcessor updateProcessor = new PopupUpdateProcessor(project) {
    @Override
    public void updatePopup(Object lookupIteObject) {
      if (lookupIteObject == null) {
        doShowJavaDocInfo(finalElement, false, this, originalElement, closeCallback, CodeInsightBundle.message("no.documentation.found"));
        return;
      }
      if (lookupIteObject instanceof PsiElement) {
        doShowJavaDocInfo((PsiElement)lookupIteObject, false, this, originalElement, closeCallback, null);
        return;
      }

      DocumentationProvider documentationProvider = getProviderFromElement(file);

      PsiElement element = documentationProvider.getDocumentationElementForLookupItem(
        PsiManager.getInstance(myProject),
        lookupIteObject,
        originalElement
      );

      if (element == null) {
        doShowJavaDocInfo(finalElement, false, this, originalElement, closeCallback, CodeInsightBundle.message("no.documentation.found"));
        return;
      }

      if (myEditor != null) {
        final PsiFile file = element.getContainingFile();
        if (file != null) {
          // showJavaDocInfo resets myEditor via the cancel path; preserve and restore it.
          Editor editor = myEditor;
          showJavaDocInfo(myEditor, file, false);
          myEditor = editor;
        }
      }
      else {
        doShowJavaDocInfo(element, false, this, originalElement, closeCallback, null);
      }
    }
  };

  doShowJavaDocInfo(element, requestFocus, updateProcessor, originalElement, closeCallback, null);
}

public PsiElement findTargetElement(Editor editor, PsiFile file) {
  return findTargetElement(editor, file, getContextElement(editor, file));
}

// Leaf PSI element at the caret, or null when there is no file (e.g. a text field editor).
private static PsiElement getContextElement(Editor editor, PsiFile file) {
  return file != null ?
file.findElementAt(editor.getCaretModel().getOffset()) : null;
}

// Central dispatch: routes documentation to the preview panel, the tool window,
// an already-visible popup, or a freshly created popup — in that order of preference.
private void doShowJavaDocInfo(@NotNull final PsiElement element,
                               boolean requestFocus,
                               PopupUpdateProcessor updateProcessor,
                               final PsiElement originalElement,
                               @Nullable final Runnable closeCallback,
                               @Nullable String documentation) {
  Project project = getProject(element);
  if (!project.isOpen()) return;

  storeOriginalElement(project, originalElement, element);

  myPreviouslyFocused = WindowManagerEx.getInstanceEx().getFocusedComponent(project);

  JBPopup _oldHint = getDocInfoHint();
  if (PreviewManager.SERVICE.preview(myProject, DocumentationPreviewPanelProvider.ID, Couple.of(element, originalElement), requestFocus) != null) {
    return;
  }

  myPrecalculatedDocumentation = documentation;
  if (myToolWindow == null && PropertiesComponent.getInstance().isTrueValue(SHOW_DOCUMENTATION_IN_TOOL_WINDOW)) {
    createToolWindow(element, originalElement);
  }
  else if (myToolWindow != null) {
    Content content = myToolWindow.getContentManager().getSelectedContent();
    if (content != null) {
      DocumentationComponent component = (DocumentationComponent)content.getComponent();
      boolean sameElement = element.getManager().areElementsEquivalent(component.getElement(), element);
      if (sameElement) {
        JComponent preferredFocusableComponent = content.getPreferredFocusableComponent();
        // focus toolwindow on the second actionPerformed
        boolean focus = requestFocus || CommandProcessor.getInstance().getCurrentCommand() != null;
        if (preferredFocusableComponent != null && focus) {
          IdeFocusManager.getInstance(myProject).requestFocus(preferredFocusableComponent, true);
        }
      }

      if (!sameElement || !component.isUpToDate()) {
        content.setDisplayName(getTitle(element, true));
        fetchDocInfo(getDefaultCollector(element, originalElement), component, true);
      }
    }

    if (!myToolWindow.isVisible()) {
      myToolWindow.show(null);
    }
  }
  else if (_oldHint != null && _oldHint.isVisible() && _oldHint instanceof AbstractPopup) {
    DocumentationComponent oldComponent =
(DocumentationComponent)((AbstractPopup)_oldHint).getComponent();
    // Reuse the visible popup: just refresh its content.
    fetchDocInfo(getDefaultCollector(element, originalElement), oldComponent);
  }
  else {
    showInPopup(element, requestFocus, updateProcessor, originalElement, closeCallback);
  }
}

// Builds and shows the quick-doc popup, wiring shortcuts, cancel handling and focus bookkeeping.
private void showInPopup(@NotNull final PsiElement element,
                         boolean requestFocus,
                         PopupUpdateProcessor updateProcessor,
                         final PsiElement originalElement,
                         @Nullable final Runnable closeCallback) {
  final DocumentationComponent component = myTestDocumentationComponent == null ? new DocumentationComponent(this) : myTestDocumentationComponent;
  // Pressing the quick-doc shortcut while the popup is visible moves the doc to the tool window.
  ActionListener actionListener = new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent e) {
      createToolWindow(element, originalElement);
      final JBPopup hint = getDocInfoHint();
      if (hint != null && hint.isVisible()) hint.cancel();
    }
  };
  List<Pair<ActionListener, KeyStroke>> actions = ContainerUtil.newSmartList();
  AnAction quickDocAction = ActionManager.getInstance().getAction(IdeActions.ACTION_QUICK_JAVADOC);
  for (Shortcut shortcut : quickDocAction.getShortcutSet().getShortcuts()) {
    if (!(shortcut instanceof KeyboardShortcut)) continue;
    actions.add(Pair.create(actionListener, ((KeyboardShortcut)shortcut).getFirstKeyStroke()));
  }

  boolean hasLookup = LookupManager.getActiveLookup(myEditor) != null;
  AbstractPopup hint = (AbstractPopup)JBPopupFactory.getInstance().createComponentPopupBuilder(component, component)
    .setProject(element.getProject())
    .addListener(updateProcessor)
    .addUserData(updateProcessor)
    .setKeyboardActions(actions)
    .setResizable(true)
    .setMovable(true)
    .setFocusable(true)
    .setRequestFocus(requestFocus)
    .setCancelOnClickOutside(!hasLookup) // otherwise selecting lookup items by mouse would close the doc
    .setModalContext(false)
    .setCancelCallback(() -> {
      // Veto close while a menu is open inside the popup.
      if (MenuSelectionManager.defaultManager().getSelectedPath().length > 0) {
        return false;
      }
      myCloseOnSneeze = false;
      if (closeCallback != null) {
        closeCallback.run();
      }
      if (fromQuickSearch()) {
((ChooseByNameBase.JPanelProvider)myPreviouslyFocused.getParent()).unregisterHint();
      }

      Disposer.dispose(component);
      myEditor = null;
      myPreviouslyFocused = null;
      return Boolean.TRUE;
    })
    .setKeyEventHandler(e -> {
      // In "close on sneeze" mode any key press closes the hint; ESC closes it in all modes.
      if (myCloseOnSneeze) {
        closeDocHint();
      }
      if (AbstractPopup.isCloseRequest(e) && getDocInfoHint() != null) {
        closeDocHint();
        return true;
      }
      return false;
    })
    .createPopup();

  component.setHint(hint);
  component.setToolwindowCallback(() -> {
    createToolWindow(element, originalElement);
    myToolWindow.setAutoHide(false);
    hint.cancel();
  });

  if (DimensionService.getInstance().getSize(NEW_JAVADOC_LOCATION_AND_SIZE, myProject) != null) {
    hint.setDimensionServiceKey(NEW_JAVADOC_LOCATION_AND_SIZE);
  }

  if (myEditor == null) {
    // subsequent invocation of javadoc popup from completion will have myEditor == null because of cancel invoked,
    // so reevaluate the editor for proper popup placement
    Lookup lookup = LookupManager.getInstance(myProject).getActiveLookup();
    myEditor = lookup != null ? lookup.getEditor() : null;
  }
  fetchDocInfo(getDefaultCollector(element, originalElement), component);

  myDocInfoHintRef = new WeakReference<>(hint);

  if (fromQuickSearch() && myPreviouslyFocused != null) {
    ((ChooseByNameBase.JPanelProvider)myPreviouslyFocused.getParent()).registerHint(hint);
  }
}

// Popup/tool-window title for the element; the short form is used as a tool-window tab name.
static String getTitle(@NotNull final PsiElement element, final boolean _short) {
  final String title = SymbolPresentationUtil.getSymbolPresentableText(element);
  return _short ? "for `" + (title != null ? title : element.getText()) + "`": CodeInsightBundle.message("javadoc.info.title", title != null ?
title : element.getText());
}

public static void storeOriginalElement(final Project project, final PsiElement originalElement, final PsiElement element) {
  if (element == null) return;
  try {
    element.putUserData(
      ORIGINAL_ELEMENT_KEY,
      SmartPointerManager.getInstance(project).createSmartPsiElementPointer(originalElement)
    );
  }
  catch (RuntimeException ex) {
    // PsiPackage does not allow putUserData
  }
}

@Nullable
public PsiElement findTargetElement(@NotNull final Editor editor, @Nullable final PsiFile file, PsiElement contextElement) {
  return findTargetElement(editor, editor.getCaretModel().getOffset(), file, contextElement);
}

@Nullable
public PsiElement findTargetElement(final Editor editor, int offset, @Nullable final PsiFile file, PsiElement contextElement) {
  try {
    return findTargetElementUnsafe(editor, offset, file, contextElement);
  }
  catch (IndexNotReadyException inre) {
    LOG.warn("Index not ready");
    LOG.debug(inre);
    return null;
  }
}

/**
 * in case index is not ready will throw IndexNotReadyException
 */
@Nullable
private PsiElement findTargetElementUnsafe(final Editor editor, int offset, @Nullable final PsiFile file, PsiElement contextElement) {
  TargetElementUtil util = TargetElementUtil.getInstance();
  // Resolution order: active lookup item, provider-specific custom element,
  // generic target-element search (adjusted for context), then reference at offset.
  PsiElement element = assertSameProject(getElementFromLookup(editor, file));
  if (element == null && file != null) {
    final DocumentationProvider documentationProvider = getProviderFromElement(file);
    if (documentationProvider instanceof DocumentationProviderEx) {
      element = assertSameProject(((DocumentationProviderEx)documentationProvider).getCustomDocumentationElement(editor, file, contextElement));
    }
  }

  if (element == null) {
    element = assertSameProject(util.findTargetElement(editor, myTargetElementUtil.getAllAccepted(), offset));

    // Allow context doc over xml tag content
    if (element != null || contextElement != null) {
      final PsiElement adjusted = assertSameProject(util.adjustElement(editor, myTargetElementUtil.getAllAccepted(), element, contextElement));
      if (adjusted !=
null) { element = adjusted; } } } if (element == null) { final PsiReference ref = TargetElementUtil.findReference(editor, offset); if (ref != null) { element = assertSameProject(util.adjustReference(ref)); if (ref instanceof PsiPolyVariantReference) { element = assertSameProject(ref.getElement()); } } } storeOriginalElement(myProject, contextElement, element); return element; } @Nullable public PsiElement getElementFromLookup(final Editor editor, @Nullable final PsiFile file) { final Lookup activeLookup = LookupManager.getInstance(myProject).getActiveLookup(); if (activeLookup != null) { LookupElement item = activeLookup.getCurrentItem(); if (item != null) { int offset = editor.getCaretModel().getOffset(); if (offset > 0 && offset == editor.getDocument().getTextLength()) offset PsiReference ref = TargetElementUtil.findReference(editor, offset); PsiElement contextElement = file == null? null : file.findElementAt(offset); PsiElement targetElement = ref != null ? ref.getElement() : contextElement; if (targetElement != null) { PsiUtilCore.ensureValid(targetElement); } DocumentationProvider documentationProvider = getProviderFromElement(file); PsiManager psiManager = PsiManager.getInstance(myProject); return documentationProvider.getDocumentationElementForLookupItem(psiManager, item.getObject(), targetElement); } } return null; } private boolean fromQuickSearch() { return myPreviouslyFocused != null && myPreviouslyFocused.getParent() instanceof ChooseByNameBase.JPanelProvider; } public String generateDocumentation(@NotNull final PsiElement element, @Nullable final PsiElement originalElement) throws Exception { return getDefaultCollector(element, originalElement).getDocumentation(); } private DocumentationCollector getDefaultCollector(@NotNull final PsiElement element, @Nullable final PsiElement originalElement) { return new DefaultDocumentationCollector(element, originalElement); } private DocumentationCollector getDefaultCollector(@NotNull final PsiElement element, 
String ref) {
  return new DefaultDocumentationCollector(element, null, ref);
}

// Returns the live quick-doc popup, clearing the stale weak reference when the popup is gone.
// In unit-test mode an invisible popup is still considered alive.
@Nullable
public JBPopup getDocInfoHint() {
  if (myDocInfoHintRef == null) return null;
  JBPopup hint = myDocInfoHintRef.get();
  if (hint == null || !hint.isVisible() && !ApplicationManager.getApplication().isUnitTestMode()) {
    myDocInfoHintRef = null;
    return null;
  }
  return hint;
}

public void fetchDocInfo(final DocumentationCollector provider, final DocumentationComponent component) {
  doFetchDocInfo(component, provider, true, false);
}

public void fetchDocInfo(final DocumentationCollector provider, final DocumentationComponent component, final boolean clearHistory) {
  doFetchDocInfo(component, provider, true, clearHistory);
}

public void fetchDocInfo(final PsiElement element, final DocumentationComponent component) {
  doFetchDocInfo(component, getDefaultCollector(element, (PsiElement)null), true, false);
}

private ActionCallback queueFetchDocInfo(final DocumentationCollector provider, final DocumentationComponent component) {
  return doFetchDocInfo(component, provider, false, false);
}

public ActionCallback queueFetchDocInfo(final PsiElement element, final DocumentationComponent component) {
  return queueFetchDocInfo(getDefaultCollector(element, (PsiElement)null), component);
}

// Core fetch: either installs precalculated documentation synchronously, or schedules an
// asynchronous fetch on the pooled-thread alarm (see continuation below).
private ActionCallback doFetchDocInfo(final DocumentationComponent component,
                                      final DocumentationCollector provider,
                                      final boolean cancelRequests,
                                      final boolean clearHistory) {
  final ActionCallback callback = new ActionCallback();
  myLastAction = callback;
  if (myPrecalculatedDocumentation != null) {
    LOG.debug("Setting precalculated documentation");
    PsiElement element = provider.getElement();
    component.setData(element, myPrecalculatedDocumentation, clearHistory, provider.getEffectiveExternalUrl(), provider.getRef());
    callback.setDone();
    return callback;
  }
  boolean wasEmpty = component.isEmpty();
  component.startWait();
  if (cancelRequests) {
    myUpdateDocAlarm.cancelAllRequests();
  }
  if (wasEmpty) {
component.setText(CodeInsightBundle.message("javadoc.fetching.progress"), null, clearHistory);
    final AbstractPopup jbPopup = (AbstractPopup)getDocInfoHint();
    if (jbPopup != null) {
      jbPopup.setDimensionServiceKey(null);
    }
  }

  ModalityState modality = ModalityState.defaultModalityState();

  // Fetch off the EDT; results are applied once all documents are committed, in the captured modality.
  myUpdateDocAlarm.addRequest(() -> {
    if (myProject.isDisposed()) return;
    LOG.debug("Started fetching documentation...");

    final PsiElement element = ReadAction.compute(() -> provider.getElement());
    if (element == null) {
      LOG.debug("Element for which documentation was requested is not available anymore");
      return;
    }

    final Throwable[] ex = new Throwable[1];
    String text = null;
    try {
      text = provider.getDocumentation();
    }
    catch (Throwable e) {
      LOG.info(e);
      ex[0] = e;
    }
    if (ex[0] != null) {
      //noinspection SSBasedInspection
      SwingUtilities.invokeLater(() -> {
        String message = ex[0] instanceof IndexNotReadyException
                         ? "Documentation is not available until indices are built."
                         : CodeInsightBundle.message("javadoc.external.fetch.error.message");
        component.setText(message, null, true);
        callback.setDone();
      });
      return;
    }

    LOG.debug("Documentation fetched successfully:\n", text);

    final String documentationText = text;
    PsiDocumentManager.getInstance(myProject).performLaterWhenAllCommitted(() -> {
      if (!element.isValid()) {
        LOG.debug("Element for which documentation was requested is not valid");
        callback.setDone();
        return;
      }
      if (documentationText == null) {
        component.setText(CodeInsightBundle.message("no.documentation.found"), element, true, clearHistory);
      }
      else if (documentationText.isEmpty()) {
        // Empty (non-null) result: keep whatever text is currently shown.
        component.setText(component.getText(), element, true, clearHistory);
      }
      else {
        component.setData(element, documentationText, clearHistory, provider.getEffectiveExternalUrl(), provider.getRef());
      }
      callback.setDone();
    }, modality);
  }, 10);
  return callback;
}

@NotNull
public static DocumentationProvider getProviderFromElement(final PsiElement element) {
  return getProviderFromElement(element, null);
}

@NotNull
public
static DocumentationProvider getProviderFromElement(@Nullable PsiElement element, @Nullable PsiElement originalElement) {
  if (element != null && !element.isValid()) {
    element = null;
  }
  if (originalElement != null && !originalElement.isValid()) {
    originalElement = null;
  }

  if (originalElement == null) {
    originalElement = getOriginalElement(element);
  }

  PsiFile containingFile =
    originalElement != null ? originalElement.getContainingFile() : element != null ? element.getContainingFile() : null;
  // Collect candidate providers in priority order; LinkedHashSet keeps order and drops duplicates/nulls are tolerated by wrapProviders.
  Set<DocumentationProvider> result = new LinkedHashSet<>();

  final Language containingFileLanguage = containingFile != null ? containingFile.getLanguage() : null;
  DocumentationProvider originalProvider =
    containingFile != null ? LanguageDocumentation.INSTANCE.forLanguage(containingFileLanguage) : null;

  final Language elementLanguage = element != null ? element.getLanguage() : null;
  DocumentationProvider elementProvider =
    element == null || elementLanguage.is(containingFileLanguage) ? null : LanguageDocumentation.INSTANCE.forLanguage(elementLanguage);

  result.add(elementProvider);
  result.add(originalProvider);

  if (containingFile != null) {
    final Language baseLanguage = containingFile.getViewProvider().getBaseLanguage();
    if (!baseLanguage.is(containingFileLanguage)) {
      result.add(LanguageDocumentation.INSTANCE.forLanguage(baseLanguage));
    }
  }
  else if (element instanceof PsiDirectory) {
    // A directory has no containing file: gather providers for every base language found inside it.
    final Set<Language> langs = new HashSet<>();

    for (PsiFile file : ((PsiDirectory)element).getFiles()) {
      final Language baseLanguage = file.getViewProvider().getBaseLanguage();
      if (!langs.contains(baseLanguage)) {
        langs.add(baseLanguage);
        result.add(LanguageDocumentation.INSTANCE.forLanguage(baseLanguage));
      }
    }
  }

  return CompositeDocumentationProvider.wrapProviders(result);
}

// The anchor element recorded by storeOriginalElement(), if still reachable through its smart pointer.
@Nullable
public static PsiElement getOriginalElement(final PsiElement element) {
  SmartPsiElementPointer originalElementPointer = element != null ? element.getUserData(ORIGINAL_ELEMENT_KEY) : null;
  return originalElementPointer != null ?
originalElementPointer.getElement() : null;
}

// Handles clicks on hyperlinks inside the documentation pane:
// "external_doc", "open…" (library settings), psi_element:// links, and arbitrary URLs.
void navigateByLink(final DocumentationComponent component, final String url) {
  component.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
  final PsiElement psiElement = component.getElement();
  if (psiElement == null) {
    return;
  }
  final PsiManager manager = PsiManager.getInstance(getProject(psiElement));
  if (url.equals("external_doc")) {
    component.showExternalDoc();
    return;
  }
  if (url.startsWith("open")) {
    // Locate the library/SDK the element belongs to and open its settings.
    final PsiFile containingFile = psiElement.getContainingFile();
    OrderEntry libraryEntry = null;
    if (containingFile != null) {
      final VirtualFile virtualFile = containingFile.getVirtualFile();
      libraryEntry = LibraryUtil.findLibraryEntry(virtualFile, myProject);
    }
    else if (psiElement instanceof PsiDirectoryContainer) {
      PsiDirectory[] directories = ((PsiDirectoryContainer)psiElement).getDirectories();
      for (PsiDirectory directory : directories) {
        final VirtualFile virtualFile = directory.getVirtualFile();
        libraryEntry = LibraryUtil.findLibraryEntry(virtualFile, myProject);
        if (libraryEntry != null) {
          break;
        }
      }
    }
    if (libraryEntry != null) {
      ProjectSettingsService.getInstance(myProject).openLibraryOrSdkSettings(libraryEntry);
    }
  }
  else if (url.startsWith(DocumentationManagerProtocol.PSI_ELEMENT_PROTOCOL)) {
    // psi_element://refText[###ref] — split off the optional in-page ref part.
    String refText = url.substring(DocumentationManagerProtocol.PSI_ELEMENT_PROTOCOL.length());
    int separatorPos = refText.lastIndexOf(DocumentationManagerProtocol.PSI_ELEMENT_PROTOCOL_REF_SEPARATOR);
    String ref = null;
    if (separatorPos >= 0) {
      ref = refText.substring(separatorPos + DocumentationManagerProtocol.PSI_ELEMENT_PROTOCOL_REF_SEPARATOR.length());
      refText = refText.substring(0, separatorPos);
    }
    DocumentationProvider provider = getProviderFromElement(psiElement);
    PsiElement targetElement = provider.getDocumentationElementForLink(manager, refText, psiElement);
    if (targetElement == null) {
      for (DocumentationProvider documentationProvider : Extensions.getExtensions(DocumentationProvider.EP_NAME)) {
        targetElement =
documentationProvider.getDocumentationElementForLink(manager, refText, psiElement);
        if (targetElement != null) {
          break;
        }
      }
    }
    if (targetElement == null) {
      // Last resort: try the provider of every registered language.
      for (Language language : Language.getRegisteredLanguages()) {
        DocumentationProvider documentationProvider = LanguageDocumentation.INSTANCE.forLanguage(language);
        if (documentationProvider != null) {
          targetElement = documentationProvider.getDocumentationElementForLink(manager, refText, psiElement);
          if (targetElement != null) {
            break;
          }
        }
      }
    }
    if (targetElement != null) {
      fetchDocInfo(getDefaultCollector(targetElement, ref), component);
    }
  }
  else {
    // Not a PSI link: give ExternalDocumentationHandlers a chance, else treat it as a plain URL.
    final DocumentationProvider provider = getProviderFromElement(psiElement);
    boolean processed = false;
    if (provider instanceof CompositeDocumentationProvider) {
      for (DocumentationProvider p : ((CompositeDocumentationProvider)provider).getAllProviders()) {
        if (!(p instanceof ExternalDocumentationHandler)) continue;
        final ExternalDocumentationHandler externalHandler = (ExternalDocumentationHandler)p;
        if (externalHandler.canFetchDocumentationLink(url)) {
          fetchDocInfo(new DocumentationCollector() {
            @Override
            public String getDocumentation() {
              return externalHandler.fetchExternalDocumentation(url, psiElement);
            }

            @Override
            public PsiElement getElement() {
              return psiElement;
            }

            @Override
            public String getEffectiveExternalUrl() {
              return url;
            }

            @Nullable
            @Override
            public String getRef() {
              return null;
            }
          }, component);
          processed = true;
        }
        else if (externalHandler.handleExternalLink(manager, url, psiElement)) {
          processed = true;
          break;
        }
      }
    }

    if (!processed) {
      fetchDocInfo(new DocumentationCollector() {
        @Override
        public String getDocumentation() {
          if (BrowserUtil.isAbsoluteURL(url)) {
            BrowserUtil.browse(url);
            return "";
          }
          else {
            return CodeInsightBundle.message("javadoc.error.resolving.url", url);
          }
        }

        @Override
        public PsiElement getElement() {
          //String loc = getElementLocator(docUrl);
          //if (loc != null) {
          //  PsiElement context = component.getElement();
          // return
JavaDocUtil.findReferenceTarget(context.getManager(), loc, context);
          return psiElement;
        }

        @Override
        public String getEffectiveExternalUrl() {
          return url;
        }

        @Nullable
        @Override
        public String getRef() {
          return null;
        }
      }, component);
    }
  }

  component.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
}

// Restores focus to the quick-search panel the popup was invoked from, once focus settles.
public void requestFocus() {
  if (fromQuickSearch()) {
    getGlobalInstance().doWhenFocusSettlesDown(() -> getGlobalInstance().requestFocus(myPreviouslyFocused.getParent(), true));
  }
}

public Project getProject(@Nullable final PsiElement element) {
  assertSameProject(element);
  return myProject;
}

// Guards against mixing elements from foreign projects into this manager.
private PsiElement assertSameProject(@Nullable PsiElement element) {
  if (element != null && element.isValid() && myProject != element.getProject()) {
    throw new AssertionError(myProject + "!=" + element.getProject() + "; element=" + element);
  }
  return element;
}

public static void createHyperlink(StringBuilder buffer, String refText, String label, boolean plainLink) {
  DocumentationManagerUtil.createHyperlink(buffer, refText, label, plainLink);
}

@Override
public String getShowInToolWindowProperty() {
  return SHOW_DOCUMENTATION_IN_TOOL_WINDOW;
}

@Override
public String getAutoUpdateEnabledProperty() {
  return DOCUMENTATION_AUTO_UPDATE_ENABLED;
}

@Override
protected void doUpdateComponent(PsiElement element, PsiElement originalElement, DocumentationComponent component) {
  fetchDocInfo(getDefaultCollector(element, originalElement), component);
}

@Override
protected void doUpdateComponent(Editor editor, PsiFile psiFile, boolean requestFocus) {
  showJavaDocInfo(editor, psiFile, requestFocus, null);
}

@Override
protected void doUpdateComponent(Editor editor, PsiFile psiFile) {
  doUpdateComponent(editor, psiFile, false);
}

@Override
protected void doUpdateComponent(@NotNull PsiElement element) {
  showJavaDocInfo(element, element, null);
}

@Override
protected String getTitle(PsiElement element) {
  return getTitle(element, true);
}

@Nullable
public Image getElementImage(@NotNull PsiElement
element, @NotNull String imageSpec) {
  // Asks each DocumentationProviderEx for a locally-rendered image matching the spec.
  DocumentationProvider provider = getProviderFromElement(element);
  if (provider instanceof CompositeDocumentationProvider) {
    for (DocumentationProvider p : ((CompositeDocumentationProvider)provider).getAllProviders()) {
      if (p instanceof DocumentationProviderEx) {
        Image image = ((DocumentationProviderEx)p).getLocalImageForElement(element, imageSpec);
        if (image != null) return image;
      }
    }
  }
  return null;
}

Editor getEditor() {
  return myEditor;
}

@TestOnly
public ActionCallback getLastAction() {
  return myLastAction;
}

@TestOnly
public void setDocumentationComponent(DocumentationComponent documentationComponent) {
  myTestDocumentationComponent = documentationComponent;
}

// Abstraction over where documentation text comes from (generated from PSI, fetched externally, ...).
private interface DocumentationCollector {
  @Nullable
  String getDocumentation() throws Exception;
  @Nullable
  PsiElement getElement();
  @Nullable
  String getEffectiveExternalUrl();
  @Nullable
  String getRef();
}

// Default collector: tries external documentation providers first, then falls back to
// generating documentation from the PSI under read-action with write-action priority.
private class DefaultDocumentationCollector implements DocumentationCollector {
  private final PsiElement myElement;
  private final PsiElement myOriginalElement;
  private final String myRef;
  private String myEffectiveUrl;   // set when documentation was fetched from an external URL

  private DefaultDocumentationCollector(PsiElement element, PsiElement originalElement) {
    this(element, originalElement, null);
  }

  private DefaultDocumentationCollector(PsiElement element, PsiElement originalElement, String ref) {
    myElement = element;
    myOriginalElement = originalElement;
    myRef = ref;
  }

  @Override
  @Nullable
  public String getDocumentation() {
    final DocumentationProvider provider = ReadAction.compute(() -> getProviderFromElement(myElement, myOriginalElement));
    LOG.debug("Using provider ", provider);

    if (provider instanceof ExternalDocumentationProvider) {
      final List<String> urls = ApplicationManager.getApplication().runReadAction(
        (NullableComputable<List<String>>)() -> {
          final SmartPsiElementPointer originalElementPtr = myElement.getUserData(ORIGINAL_ELEMENT_KEY);
          final PsiElement originalElement = originalElementPtr != null ?
originalElementPtr.getElement() : null;
          return provider.getUrlFor(myElement, originalElement);
        }
      );
      LOG.debug("External documentation URLs: ", urls);
      if (urls != null) {
        // First URL that yields a document wins; remember it for the "effective URL".
        for (String url : urls) {
          final String doc = ((ExternalDocumentationProvider)provider).fetchExternalDocumentation(myProject, myElement, Collections.singletonList(url));
          if (doc != null) {
            LOG.debug("Fetched documentation from ", url);
            myEffectiveUrl = url;
            return doc;
          }
        }
      }
    }

    final Ref<String> result = new Ref<>();
    // Generate from PSI in a read action that yields to pending write actions, retrying within the timeout.
    QuickDocUtil.runInReadActionWithWriteActionPriorityWithRetries(() -> {
      if (!myElement.isValid()) return;
      SmartPsiElementPointer originalPointer = myElement.getUserData(ORIGINAL_ELEMENT_KEY);
      PsiElement originalPsi = originalPointer != null ? originalPointer.getElement() : null;
      String doc = provider.generateDoc(myElement, originalPsi);
      if (doc == null && myElement instanceof PsiFile) {
        doc = generateFileDoc((PsiFile)myElement);
      }
      result.set(doc);
    }, DOC_GENERATION_TIMEOUT_MILLISECONDS, DOC_GENERATION_PAUSE_MILLISECONDS, null);
    return result.get();
  }

  @Override
  @Nullable
  public PsiElement getElement() {
    return myElement.isValid() ? myElement : null;
  }

  @Nullable
  @Override
  public String getEffectiveExternalUrl() {
    return myEffectiveUrl;
  }

  @Nullable
  @Override
  public String getRef() {
    return myRef;
  }
}

// Fallback documentation for a file itself: size, type and timestamps, rendered as HTML.
@Nullable
private static String generateFileDoc(@NotNull PsiFile psiFile) {
  VirtualFile file = PsiUtilCore.getVirtualFile(psiFile);
  File ioFile = file == null ? null : VfsUtilCore.virtualToIoFile(file);
  BasicFileAttributes attr = null;
  try {
    attr = ioFile == null ? null : Files.readAttributes(Paths.get(ioFile.toURI()), BasicFileAttributes.class);
  }
  catch (IOException ignored) {
    // no attributes available — no file documentation
  }
  if (attr == null) return null;
  FileType type = psiFile.getFileType();
  String typeName = type == UnknownFileType.INSTANCE ? "Unknown" :
                    type == PlainTextFileType.INSTANCE ? "Text" :
                    type == ArchiveFileType.INSTANCE ?
"Archive" :
                    type.getName();
  String text = "File size " + StringUtil.formatFileSize(attr.size()) + "\n" +
                typeName + (type.isBinary() ? "" : " (" + psiFile.getLanguage().getDisplayName() + ")") +
                "\nModified on " + DateFormatUtil.formatDateTime(attr.lastModifiedTime().toMillis()) +
                "\nCreated on " + DateFormatUtil.formatDateTime(attr.creationTime().toMillis());
  // Escape, then convert newlines to <br> for the HTML documentation pane.
  return StringUtil.replace(StringUtil.escapeXml(text), "\n", "<br>");
}
}
package com.intellij.openapi.editor.impl;

import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.ex.EditorGutterComponentEx;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.ex.util.EditorUIUtil;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.*;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.Alarm;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.ImageUtil;
import com.intellij.util.ui.StartupUiUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.TIntIntHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.RoundRectangle2D;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Renders a lightweight popup ("hint") that previews a fragment of the editor around a given
 * visual line, including the gutter, with error-stripe tooltips painted as small rounded labels
 * at the end of their lines. Used when hovering the error stripe / scrollbar markup.
 *
 * <p>Painting is done through two cached images (see {@code EditorFragmentPreviewPanel}):
 * a large level-2 cache of the editor content around the target line, and a smaller level-1
 * cache holding exactly the lines currently shown in the popup.</p>
 */
class EditorFragmentRenderer {
  // Actually preview has myPreviewLines * 2 + 1 lines (above + below + current one)
  static final int PREVIEW_LINES = Math.max(2, Math.min(25, Integer.getInteger("preview.lines", 5)));
  static final int EDITOR_FRAGMENT_POPUP_BORDER = 1;
  // Actually cache image has myCachePreviewLines * 2 + 1 lines (above + below + current one)
  private static final int CACHE_PREVIEW_LINES = 100;

  private final EditorImpl myEditor;
  // Visual line the preview is centered on; -1 means "nothing to show".
  private int myVisualLine;
  // First/last visual lines covered by the preview, clamped to the editor's line range.
  private int myStartVisualLine;
  private int myEndVisualLine;
  // Y offset of the preview panel relative to the editor's scroll pane (updated on repaint).
  private int myRelativeY;
  // True while a delayed (alarm-scheduled) show is pending; cleared when the hint hides.
  private boolean myDelayed;
  // True when the level-1 cache must be repainted (line range changed or cache recreated).
  private boolean isDirty;
  // Holders for the point/hint info captured at show() time, read later by the delayed request.
  private final AtomicReference<Point> myPointHolder = new AtomicReference<>();
  private final AtomicReference<HintHint> myHintHolder = new AtomicReference<>();
  private @Nullable LightweightHint myEditorPreviewHint;

  EditorFragmentRenderer(EditorImpl editor) {
    myEditor = editor;
  }

  /** @return the currently shown (or pending) preview hint, or {@code null} if none. */
  @Nullable
  LightweightHint getEditorPreviewHint() {
    return myEditorPreviewHint;
  }

  int getStartVisualLine() {
    return myStartVisualLine;
  }

  int getRelativeY() {
    return myRelativeY;
  }

  /**
   * Recomputes the previewed line range around {@code visualLine} and marks the cache dirty
   * when the range changed. NOTE(review): {@code showInstantly} is accepted but not read here.
   */
  private void update(int visualLine, boolean showInstantly) {
    myVisualLine = visualLine;
    if (myVisualLine == -1) return;
    int oldStartLine = myStartVisualLine;
    int oldEndLine = myEndVisualLine;
    myStartVisualLine = EditorMarkupModelImpl.fitLineToEditor(myEditor, myVisualLine - PREVIEW_LINES);
    myEndVisualLine = EditorMarkupModelImpl.fitLineToEditor(myEditor, myVisualLine + PREVIEW_LINES);
    isDirty |= oldStartLine != myStartVisualLine || oldEndLine != myEndVisualLine;
  }

  /**
   * Shows (or schedules) the preview hint for {@code visualLine}.
   *
   * @param visualLine        visual line to center the preview on; -1 aborts
   * @param rangeHighlighters highlighters whose tooltips should be painted in the preview
   * @param showInstantly     when false and the hint is freshly created, showing is delayed by ~300 ms
   * @param hintInfo          hint placement/appearance info; its background and border color are adjusted here
   */
  public void show(int visualLine,
                   @NotNull Collection<? extends RangeHighlighterEx> rangeHighlighters,
                   boolean showInstantly,
                   @NotNull HintHint hintInfo) {
    update(visualLine, showInstantly);
    ArrayList<? extends RangeHighlighterEx> highlighters = new ArrayList<>(rangeHighlighters);
    // Order highlighters on the same line by column; highlighters on different lines keep their
    // relative order (comparator returns 0 across lines).
    highlighters.sort((ex1, ex2) -> {
      LogicalPosition startPos1 = myEditor.offsetToLogicalPosition(ex1.getAffectedAreaStartOffset());
      LogicalPosition startPos2 = myEditor.offsetToLogicalPosition(ex2.getAffectedAreaStartOffset());
      if (startPos1.line != startPos2.line) return 0;
      return startPos1.column - startPos2.column;
    });
    int contentInsets = JBUIScale.scale(2); // BalloonPopupBuilderImpl.myContentInsets
    final HintManagerImpl hintManager = HintManagerImpl.getInstanceImpl();
    boolean needDelay = false;
    if (myEditorPreviewHint == null) {
      // First show after a hide: create the hint and delay its appearance (unless showInstantly).
      needDelay = true;
      final JPanel editorFragmentPreviewPanel = new EditorFragmentPreviewPanel(contentInsets, highlighters);
      editorFragmentPreviewPanel.putClientProperty(BalloonImpl.FORCED_NO_SHADOW, Boolean.TRUE);
      myEditorPreviewHint = new LightweightHint(editorFragmentPreviewPanel) {
        @Override
        public void hide(boolean ok) {
          super.hide(ok);
          // Cancel any still-pending delayed show once the hint is hidden.
          myDelayed = false;
        }
      };
      myEditorPreviewHint.setForceLightweightPopup(true);
    }
    Point point = new Point(hintInfo.getOriginalPoint());
    hintInfo.setTextBg(myEditor.getBackgroundColor());
    Color borderColor = myEditor.getColorsScheme().getAttributes(EditorColors.CODE_LENS_BORDER_COLOR).getEffectColor();
    hintInfo.setBorderColor(borderColor != null ? borderColor : myEditor.getColorsScheme().getDefaultForeground());
    // Translate from scrollbar coordinates to the root pane, where the hint is positioned.
    point = SwingUtilities.convertPoint(myEditor.getVerticalScrollBar(), point, myEditor.getComponent().getRootPane());
    myPointHolder.set(point);
    myHintHolder.set(hintInfo);
    if (needDelay && !showInstantly) {
      myDelayed = true;
      Alarm alarm = new Alarm();
      alarm.addRequest(() -> {
        // Abort if the hint was hidden/cleared while the alarm was pending.
        if (myEditorPreviewHint == null || !myDelayed) return;
        showEditorHint(hintManager, myPointHolder.get(), myHintHolder.get());
        myDelayed = false;
      }, /*Registry.intValue("ide.tooltip.initialDelay")*/300);
    }
    else if (!myDelayed) {
      showEditorHint(hintManager, point, hintInfo);
    }
  }

  /** Actually displays the hint; hidden on any key, text change, mouseover, Escape, or scrolling. */
  private void showEditorHint(@NotNull HintManagerImpl hintManager, @NotNull Point point, HintHint hintInfo) {
    int flags = HintManager.HIDE_BY_ANY_KEY | HintManager.HIDE_BY_TEXT_CHANGE | HintManager.HIDE_BY_MOUSEOVER |
                HintManager.HIDE_BY_ESCAPE | HintManager.HIDE_BY_SCROLLING;
    hintManager.showEditorHint(myEditorPreviewHint, myEditor, point, flags, 0, false, hintInfo);
  }

  /** Drops the reference to the hint without hiding it (caller is responsible for its lifecycle). */
  void clearHint() {
    myEditorPreviewHint = null;
  }

  /** Hides the hint if shown and drops the reference. */
  void hideHint() {
    if (myEditorPreviewHint != null) {
      myEditorPreviewHint.hide();
      myEditorPreviewHint = null;
    }
  }

  /**
   * Panel that paints the editor fragment. Maintains two image caches:
   * level-2 — a tall strip of editor content (gutter + text) around the target line,
   * repainted only when the preview scrolls outside of it; level-1 — the exact popup-sized
   * image, recomposed from level-2 (plus tooltip labels) whenever {@code isDirty}.
   */
  private class EditorFragmentPreviewPanel extends JPanel {
    private static final int R = 6; // corner radius / padding unit for tooltip labels
    private final int myContentInsets;
    private final List<? extends RangeHighlighterEx> myHighlighters;
    private @Nullable BufferedImage myCacheLevel1;
    private @Nullable BufferedImage myCacheLevel2;
    // Editor-space Y range currently covered by the level-2 cache.
    private int myCacheFromY;
    private int myCacheToY;

    private EditorFragmentPreviewPanel(int contentInsets, List<? extends RangeHighlighterEx> highlighters) {
      myContentInsets = contentInsets;
      myHighlighters = highlighters;
    }

    @DirtyUI
    @Override
    public @NotNull Dimension getPreferredSize() {
      // Width: visible editor area + gutter, minus scrollbar, borders, insets and balloon pointer.
      int width = myEditor.getGutterComponentEx().getWidth() + myEditor.getScrollingModel().getVisibleArea().width -
                  myEditor.getVerticalScrollBar().getWidth();
      width -= JBUIScale.scale(EDITOR_FRAGMENT_POPUP_BORDER) * 2 + myContentInsets;
      // Height: capped at 2*PREVIEW_LINES line heights, or less near the document edges.
      return new Dimension(width - BalloonImpl.POINTER_LENGTH.get(),
                           Math.min(2 * PREVIEW_LINES * myEditor.getLineHeight(),
                                    myEditor.visualLineToY(myEndVisualLine) - myEditor.visualLineToY(myStartVisualLine)));
    }

    @DirtyUI
    @Override
    protected void paintComponent(@NotNull Graphics g) {
      if (myVisualLine == -1 || myEditor.isDisposed()) return;
      Dimension size = getPreferredSize();
      if (size.width <= 0 || size.height <= 0) return;
      EditorGutterComponentEx gutter = myEditor.getGutterComponentEx();
      EditorComponentImpl content = myEditor.getContentComponent();
      int gutterWidth = gutter.getWidth();
      int lineHeight = myEditor.getLineHeight();
      // Invalidate the level-2 cache when the preview range fell outside of it.
      if (myCacheLevel2 != null &&
          (myEditor.visualLineToY(myStartVisualLine) < myCacheFromY ||
           myEditor.visualLineToY(myEndVisualLine) + lineHeight > myCacheToY)) {
        myCacheLevel2 = null;
      }
      if (myCacheLevel2 == null) {
        // Rebuild the level-2 cache: paint gutter and content side by side, translated so that
        // editor Y = myCacheFromY maps to image Y = 0.
        myCacheFromY = Math.max(0, myEditor.visualLineToY(myVisualLine) - CACHE_PREVIEW_LINES * lineHeight);
        myCacheToY = Math.min(myEditor.visualLineToY(myEditor.getVisibleLineCount()),
                              myCacheFromY + (2 * CACHE_PREVIEW_LINES + 1) * lineHeight);
        myCacheLevel2 = ImageUtil.createImage(g, size.width, myCacheToY - myCacheFromY, BufferedImage.TYPE_INT_RGB);
        Graphics2D cg = myCacheLevel2.createGraphics();
        final AffineTransform t = cg.getTransform();
        EditorUIUtil.setupAntialiasing(cg);
        int lineShift = -myCacheFromY;
        int shift = JBUIScale.scale(EDITOR_FRAGMENT_POPUP_BORDER) + myContentInsets;
        AffineTransform gutterAT = AffineTransform.getTranslateInstance(-shift, lineShift);
        AffineTransform contentAT = AffineTransform.getTranslateInstance(gutterWidth - shift, lineShift);
        // Keep any device/HiDPI scale from the original transform.
        gutterAT.preConcatenate(t);
        contentAT.preConcatenate(t);
        EditorTextField.SUPPLEMENTARY_KEY.set(myEditor, Boolean.TRUE);
        try {
          cg.setTransform(gutterAT);
          cg.setClip(0, -lineShift, gutterWidth, myCacheLevel2.getHeight());
          gutter.paint(cg);
          cg.setTransform(contentAT);
          cg.setClip(0, -lineShift, content.getWidth(), myCacheLevel2.getHeight());
          content.paint(cg);
        }
        finally {
          EditorTextField.SUPPLEMENTARY_KEY.set(myEditor, null);
        }
      }
      if (myCacheLevel1 == null) {
        myCacheLevel1 = ImageUtil.createImage(g, size.width, lineHeight * (2 * PREVIEW_LINES + 1), BufferedImage.TYPE_INT_RGB);
        isDirty = true;
      }
      if (isDirty) {
        // Recompose level-1: blit the relevant strip of level-2, then draw tooltip labels.
        myRelativeY = SwingUtilities.convertPoint(this, 0, 0, myEditor.getScrollPane()).y;
        Graphics2D g2d = myCacheLevel1.createGraphics();
        final AffineTransform transform = g2d.getTransform();
        EditorUIUtil.setupAntialiasing(g2d);
        GraphicsUtil.setupAAPainting(g2d);
        g2d.setColor(myEditor.getBackgroundColor());
        g2d.fillRect(0, 0, getWidth(), getHeight());
        int topDisplayedY = Math.max(myEditor.visualLineToY(myStartVisualLine),
                                     myEditor.visualLineToY(myVisualLine) - PREVIEW_LINES * lineHeight);
        AffineTransform translateInstance = AffineTransform.getTranslateInstance(gutterWidth, myCacheFromY - topDisplayedY);
        translateInstance.preConcatenate(transform);
        g2d.setTransform(translateInstance);
        UIUtil.drawImage(g2d, myCacheLevel2, -gutterWidth, 0, null);
        // Per-line right edge of already-drawn labels, to lay out several labels on one line.
        TIntIntHashMap rightEdges = new TIntIntHashMap();
        int h = lineHeight - 2;
        EditorColorsScheme colorsScheme = myEditor.getColorsScheme();
        Font font = UIUtil.getFontWithFallback(colorsScheme.getEditorFontName(), Font.PLAIN, colorsScheme.getEditorFontSize());
        g2d.setFont(font.deriveFont(font.getSize() * .8F));
        for (RangeHighlighterEx ex : myHighlighters) {
          if (!ex.isValid()) continue;
          int hEndOffset = ex.getAffectedAreaEndOffset();
          Object tooltip = ex.getErrorStripeTooltip();
          if (tooltip == null) continue;
          String s = tooltip instanceof HighlightInfo ? ((HighlightInfo)tooltip).getDescription() : String.valueOf(tooltip);
          if (StringUtil.isEmpty(s)) continue;
          // Collapse HTML-ish whitespace into single spaces before measuring/drawing.
          s = s.replaceAll("&nbsp;", " ").replaceAll("\\s+", " ");
          s = StringUtil.unescapeXmlEntities(s);
          LogicalPosition logicalPosition = myEditor.offsetToLogicalPosition(hEndOffset);
          int endOfLineOffset = myEditor.getDocument().getLineEndOffset(logicalPosition.line);
          logicalPosition = myEditor.offsetToLogicalPosition(endOfLineOffset);
          Point placeToShow = myEditor.logicalPositionToXY(logicalPosition);
          logicalPosition = myEditor.xyToLogicalPosition(placeToShow);//wraps&foldings workaround
          placeToShow.x += R * 3 / 2;
          placeToShow.y -= myCacheFromY - 1;
          int w = g2d.getFontMetrics().stringWidth(s);
          int rightEdge = rightEdges.get(logicalPosition.line);
          placeToShow.x = Math.max(placeToShow.x, rightEdge);
          rightEdge = Math.max(rightEdge, placeToShow.x + w + 3 * R);
          rightEdges.put(logicalPosition.line, rightEdge);
          g2d.setColor(MessageType.WARNING.getPopupBackground());
          g2d.fillRoundRect(placeToShow.x, placeToShow.y, w + 2 * R, h, R, R);
          g2d.setColor(new JBColor(JBColor.GRAY, Gray._200));
          g2d.drawRoundRect(placeToShow.x, placeToShow.y, w + 2 * R, h, R, R);
          g2d.setColor(JBColor.foreground());
          g2d.drawString(s, placeToShow.x + R, placeToShow.y + h - g2d.getFontMetrics(g2d.getFont()).getDescent() / 2 - 2);
        }
        isDirty = false;
      }
      // Finally paint the level-1 cache to the screen, with rounded clipping.
      Graphics2D g2 = (Graphics2D)g.create();
      try {
        GraphicsUtil.setupAAPainting(g2);
        g2.setClip(new RoundRectangle2D.Double(0, 0, size.width - .5, size.height - .5, 2, 2));
        UIUtil.drawImage(g2, myCacheLevel1, 0, 0, this);
        if (StartupUiUtil.isUnderDarcula()) {
          //Add glass effect
          Shape s = new Rectangle(0, 0, size.width, size.height);
          double cx = size.width / 2.0;
          double rx = size.width / 10.0;
          int ry = lineHeight * 3 / 2;
          g2.setPaint(new GradientPaint(0, 0, Gray._255.withAlpha(75), 0, ry, Gray._255.withAlpha(10)));
          double pseudoMajorAxis = size.width - rx * 9 / 5;
          double cy = 0;
          Shape topShape1 = new Ellipse2D.Double(cx - rx - pseudoMajorAxis / 2, cy - ry, 2 * rx, 2 * ry);
          Shape topShape2 = new Ellipse2D.Double(cx - rx + pseudoMajorAxis / 2, cy - ry, 2 * rx, 2 * ry);
          Area topArea = new Area(topShape1);
          topArea.add(new Area(topShape2));
          topArea.add(new Area(new Rectangle.Double(cx - pseudoMajorAxis / 2, cy, pseudoMajorAxis, ry)));
          g2.fill(topArea);
          Area bottomArea = new Area(s);
          bottomArea.subtract(topArea);
          g2.setPaint(new GradientPaint(0, size.height - ry, Gray._0.withAlpha(10), 0, size.height, Gray._255.withAlpha(30)));
          g2.fill(bottomArea);
        }
      }
      finally {
        g2.dispose();
      }
    }
  }
}
package org.jkiss.dbeaver.ui.search.data;

import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.viewers.*;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.core.CoreMessages;
import org.jkiss.dbeaver.core.DBeaverCore;
import org.jkiss.dbeaver.model.navigator.*;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.search.AbstractSearchPage;
import org.jkiss.dbeaver.ui.views.navigator.database.CheckboxTreeManager;
import org.jkiss.dbeaver.ui.views.navigator.database.DatabaseNavigatorTree;
import org.jkiss.dbeaver.ui.views.navigator.database.load.TreeLoadNode;
import org.jkiss.utils.CommonUtils;

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * Search page for full-text data search across selected database objects.
 * Lets the user pick a search string, target data sources (checkbox tree) and
 * options (case sensitivity, max results, indexed search, numbers/LOBs), and
 * builds a {@link SearchDataQuery} from them. State is persisted in the
 * preference store between sessions.
 */
public class SearchDataPage extends AbstractSearchPage {

    private static final String PROP_MASK = "search.data.mask"; //$NON-NLS-1$
    private static final String PROP_CASE_SENSITIVE = "search.data.case-sensitive"; //$NON-NLS-1$
    private static final String PROP_MAX_RESULT = "search.data.max-results"; //$NON-NLS-1$
    private static final String PROP_FAST_SEARCH = "search.data.fast-search"; //$NON-NLS-1$
    private static final String PROP_SEARCH_NUMBERS = "search.data.search-numbers"; //$NON-NLS-1$
    private static final String PROP_SEARCH_LOBS = "search.data.search-lobs"; //$NON-NLS-1$
    private static final String PROP_HISTORY = "search.data.history"; //$NON-NLS-1$
    private static final String PROP_SOURCES = "search.data.object-source"; //$NON-NLS-1$

    private Combo searchText;
    private DatabaseNavigatorTree dataSourceTree;

    private SearchDataParams params = new SearchDataParams();
    // LinkedHashSet keeps history in insertion order and de-duplicates entries.
    private Set<String> searchHistory = new LinkedHashSet<String>();
    // Nodes restored from the preference store by loadState(); re-checked in createControl().
    private List<DBNNode> checkedNodes;
    private CheckboxTreeManager checkboxTreeManager;

    public SearchDataPage() {
        super("Database objects search");
    }

    @Override
    public void createControl(Composite parent) {
        initializeDialogUnits(parent);

        Composite searchGroup = new Composite(parent, SWT.NONE);
        searchGroup.setLayoutData(new GridData(GridData.FILL_BOTH));
        searchGroup.setLayout(new GridLayout(3, false));
        setControl(searchGroup);

        // Search string combo, pre-filled with the persisted mask and history.
        UIUtils.createControlLabel(searchGroup, "String");
        searchText = new Combo(searchGroup, SWT.DROP_DOWN);
        searchText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        if (params.searchString != null) {
            searchText.setText(params.searchString);
        }
        for (String history : searchHistory) {
            searchText.add(history);
        }
        searchText.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                params.searchString = searchText.getText();
                updateEnablement();
            }
        });

        Composite optionsGroup = new SashForm(searchGroup, SWT.NONE);
        GridLayout layout = new GridLayout(2, true);
        layout.marginHeight = 0;
        layout.marginWidth = 0;
        optionsGroup.setLayout(layout);
        GridData gd = new GridData(GridData.FILL_BOTH);
        gd.horizontalSpan = 3;
        optionsGroup.setLayoutData(gd);

        {
            // Left pane: checkbox tree of databases/containers to search in.
            final DBeaverCore core = DBeaverCore.getInstance();

            Group databasesGroup = UIUtils.createControlGroup(optionsGroup, "Databases", 1, GridData.FILL_BOTH, 0);
            gd = new GridData(GridData.FILL_BOTH);
            gd.heightHint = 300;
            databasesGroup.setLayoutData(gd);

            final DBNProject projectNode = core.getNavigatorModel().getRoot().getProject(core.getProjectRegistry().getActiveProject());
            DBNNode rootNode = projectNode == null ? core.getNavigatorModel().getRoot() : projectNode.getDatabases();
            dataSourceTree = new DatabaseNavigatorTree(databasesGroup, rootNode, SWT.SINGLE | SWT.CHECK);
            dataSourceTree.setLayoutData(new GridData(GridData.FILL_BOTH));

            final CheckboxTreeViewer viewer = (CheckboxTreeViewer) dataSourceTree.getViewer();
            viewer.addFilter(new ViewerFilter() {
                @Override
                public boolean select(Viewer viewer, Object parentElement, Object element) {
                    if (element instanceof TreeLoadNode) {
                        return true;
                    }
                    if (element instanceof DBNNode) {
                        if (element instanceof DBNDatabaseFolder) {
                            DBNDatabaseFolder folder = (DBNDatabaseFolder) element;
                            Class<? extends DBSObject> folderItemsClass = folder.getChildrenClass();
                            return folderItemsClass != null &&
                                (DBSObjectContainer.class.isAssignableFrom(folderItemsClass) ||
                                 DBSEntity.class.isAssignableFrom(folderItemsClass));
                        }
                        // FIX: the DBSEntity check must stay inside the "instanceof DBSWrapper"
                        // guard. The previous grouping (a && b || c) evaluated the cast on nodes
                        // that are not DBSWrapper, risking a ClassCastException.
                        if (element instanceof DBNLocalFolder ||
                            element instanceof DBNProjectDatabases ||
                            element instanceof DBNDataSource ||
                            (element instanceof DBSWrapper &&
                                (((DBSWrapper) element).getObject() instanceof DBSObjectContainer ||
                                 ((DBSWrapper) element).getObject() instanceof DBSEntity))) {
                            return true;
                        }
                    }
                    return false;
                }
            });
            checkboxTreeManager = new CheckboxTreeManager(viewer, new Class[]{DBSDataSearcher.class});
            viewer.addCheckStateListener(checkboxTreeManager);
            viewer.addCheckStateListener(new ICheckStateListener() {
                @Override
                public void checkStateChanged(CheckStateChangedEvent event) {
                    updateEnablement();
                }
            });
        }
        {
            // Right pane: search settings.
            //new Label(searchGroup, SWT.NONE);
            Composite optionsGroup2 = UIUtils.createControlGroup(optionsGroup, "Settings", 2, GridData.FILL_BOTH, 0);
            optionsGroup2.setLayoutData(new GridData(GridData.FILL_HORIZONTAL | GridData.HORIZONTAL_ALIGN_BEGINNING | GridData.VERTICAL_ALIGN_BEGINNING));

            if (params.maxResults <= 0) {
                params.maxResults = 100;
            }

            final Spinner maxResultsSpinner = UIUtils.createLabelSpinner(optionsGroup2, CoreMessages.dialog_search_objects_spinner_max_results, params.maxResults, 1, 10000);
            maxResultsSpinner.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));
            maxResultsSpinner.addModifyListener(new ModifyListener() {
                @Override
                public void modifyText(ModifyEvent e) {
                    params.maxResults = maxResultsSpinner.getSelection();
                }
            });

            final Button caseCheckbox = UIUtils.createLabelCheckbox(optionsGroup2, CoreMessages.dialog_search_objects_case_sensitive, params.caseSensitive);
            caseCheckbox.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));
            caseCheckbox.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    params.caseSensitive = caseCheckbox.getSelection();
                }
            });

            final Button fastSearchCheckbox = UIUtils.createLabelCheckbox(optionsGroup2, "Fast search (indexed)", params.fastSearch);
            fastSearchCheckbox.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));
            fastSearchCheckbox.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    params.fastSearch = fastSearchCheckbox.getSelection();
                }
            });

            final Button searchNumbersCheckbox = UIUtils.createLabelCheckbox(optionsGroup2, "Search in numbers", params.searchNumbers);
            searchNumbersCheckbox.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));
            searchNumbersCheckbox.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    params.searchNumbers = searchNumbersCheckbox.getSelection();
                }
            });

            final Button searchLOBCheckbox = UIUtils.createLabelCheckbox(optionsGroup2, "Search in LOBs", params.searchLOBs);
            searchLOBCheckbox.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));
            searchLOBCheckbox.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    // FIX: was reading searchNumbersCheckbox (copy-paste bug), which made the
                    // "Search in LOBs" option track the "Search in numbers" checkbox instead.
                    params.searchLOBs = searchLOBCheckbox.getSelection();
                }
            });
        }

        // Restore previously checked sources (null-safe: loadState may not have run or found any).
        if (checkedNodes != null && !checkedNodes.isEmpty()) {
            for (DBNNode node : checkedNodes) {
                ((CheckboxTreeViewer)dataSourceTree.getViewer()).setChecked(node, true);
                checkboxTreeManager.checkStateChanged(new CheckStateChangedEvent((CheckboxTreeViewer)dataSourceTree.getViewer(), node, true));
            }
        }
        updateEnablement();
    }

    /**
     * Builds the query from current params and records the search string in history.
     * @throws DBException on query creation failure
     */
    @Override
    public SearchDataQuery createQuery() throws DBException {
        params.sources = getCheckedSources();

        // Save search query
        if (!searchHistory.contains(params.searchString)) {
            searchHistory.add(params.searchString);
            searchText.add(params.searchString);
        }

        return SearchDataQuery.createQuery(params);
    }

    /** Restores search parameters, history and checked sources from the preference store. */
    @Override
    public void loadState(IPreferenceStore store) {
        params.searchString = store.getString(PROP_MASK);
        params.caseSensitive = store.getBoolean(PROP_CASE_SENSITIVE);
        params.fastSearch = store.getBoolean(PROP_FAST_SEARCH);
        params.searchNumbers = store.getBoolean(PROP_SEARCH_NUMBERS);
        params.searchLOBs = store.getBoolean(PROP_SEARCH_LOBS);
        params.maxResults = store.getInt(PROP_MAX_RESULT);
        // History entries are stored as numbered keys until the first empty one.
        for (int i = 0; ;i++) {
            String history = store.getString(PROP_HISTORY + "." + i); //$NON-NLS-1$
            if (CommonUtils.isEmpty(history)) {
                break;
            }
            searchHistory.add(history);
        }
        checkedNodes = loadTreeState(store, PROP_SOURCES);
    }

    /** Persists search parameters, checked sources, and up to 20 history entries. */
    @Override
    public void saveState(IPreferenceStore store) {
        store.setValue(PROP_MASK, params.searchString);
        store.setValue(PROP_CASE_SENSITIVE, params.caseSensitive);
        store.setValue(PROP_MAX_RESULT, params.maxResults);
        store.setValue(PROP_FAST_SEARCH, params.fastSearch);
        store.setValue(PROP_SEARCH_NUMBERS, params.searchNumbers);
        store.setValue(PROP_SEARCH_LOBS, params.searchLOBs);
        saveTreeState(store, PROP_SOURCES, dataSourceTree);

        {
            // Search history
            int historyIndex = 0;
            for (String history : searchHistory) {
                if (historyIndex >= 20) {
                    break;
                }
                store.setValue(PROP_HISTORY + "." + historyIndex, history); //$NON-NLS-1$
                historyIndex++;
            }
        }
    }

    /** @return data-source-attached objects currently checked in the tree. */
    protected List<DBSObject> getCheckedSources() {
        List<DBSObject> result = new ArrayList<DBSObject>();
        for (Object sel : ((CheckboxTreeViewer)dataSourceTree.getViewer()).getCheckedElements()) {
            if (sel instanceof DBSWrapper) {
                DBSObject object = ((DBSWrapper) sel).getObject();
                if (object != null && object.getDataSource() != null) {
                    result.add(object);
                }
            }
        }
        return result;
    }

    /** Enables the Search button only when at least one source is checked. */
    protected void updateEnablement() {
        boolean enabled = false;
        if (!getCheckedSources().isEmpty()) {
            enabled = true;
        }
        container.setSearchEnabled(enabled);
    }
}
package org.osiam.resources.provisioning.update; import java.util.HashSet; import java.util.Set; import javax.inject.Inject; import org.osiam.resources.scim.Group; import org.osiam.resources.scim.MemberRef; import org.osiam.storage.dao.ResourceDao; import org.osiam.storage.entities.GroupEntity; import org.osiam.storage.entities.ResourceEntity; import org.springframework.stereotype.Service; @Service public class GroupUpdater { @Inject private ResourceUpdater resourceUpdater; @Inject private ResourceDao resourceDao; public void update(Group group, GroupEntity groupEntity) { resourceUpdater.update(group, groupEntity); Set<String> attributes = new HashSet<>(); if (group.getMeta() != null && group.getMeta().getAttributes() != null) { attributes = group.getMeta().getAttributes(); } if (group.getDisplayName() != null && !group.getDisplayName().isEmpty()) { groupEntity.setDisplayName(group.getDisplayName()); } updateMembers(group, groupEntity, attributes); } private void updateMembers(Group group, GroupEntity groupEntity, Set<String> attributes) { String attributeName = "members"; for (String attribute : attributes) { if (attribute.equalsIgnoreCase(attributeName)) { groupEntity.removeAllMembers(); } } if (group.getMembers() != null) { for (MemberRef memberRef : group.getMembers()) { String memberId = memberRef.getValue(); if (memberRef.getOperation() != null && memberRef.getOperation().equalsIgnoreCase("delete")) { ResourceEntity member = getMember(memberId, groupEntity); if(member != null) { groupEntity.removeMember(member); } } else { // TODO: what to do if member does not exist? ResourceEntity member = resourceDao.getById(memberId, ResourceEntity.class); groupEntity.addMember(member); } } } } private ResourceEntity getMember(String memberId, GroupEntity groupEntity) { Set<ResourceEntity> members = groupEntity.getMembers(); for (ResourceEntity member : members) { if(member.getId().toString().equals(memberId)) { return member; } } return null; } }
package org.jetbrains.plugins.ruby.ruby.actions;

import com.intellij.execution.ExecutionException;
import com.intellij.execution.ExecutionManager;
import com.intellij.execution.Executor;
import com.intellij.execution.RunContentExecutor;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.process.KillableColoredProcessHandler;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.execution.util.ExecUtil;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.SystemInfoRt;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.execution.ParametersListUtil;
import icons.RubyIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.ruby.RBundle;
import org.jetbrains.plugins.ruby.remote.RubyRemoteInterpreterManager;
import org.jetbrains.plugins.ruby.ruby.RModuleUtil;
import org.jetbrains.plugins.ruby.ruby.run.RubyAbstractRunner;
import org.jetbrains.plugins.ruby.rvm.RVMSupportUtil;
import org.jetbrains.plugins.ruby.utils.OSUtil;
import org.jetbrains.plugins.ruby.version.management.rbenv.gemsets.RbenvGemsetManager;

import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * "Run Anything" item for a free-form command line that did not match any known action or
 * run configuration. Executes the command in a console, wrapping it in the user's shell and
 * adjusting PATH/environment for the module's Ruby SDK (plain, RVM, or rbenv).
 */
public class RunAnythingUndefinedItem extends RunAnythingItem<String> {
  @Nullable private final Module myModule;
  @NotNull private final String myCommandLine;
  @NotNull private final Project myProject;

  static final Icon UNDEFINED_COMMAND_ICON = RubyIcons.RunAnything.Run_anything;

  public RunAnythingUndefinedItem(@NotNull Project project, @Nullable Module module, @NotNull String commandLine) {
    myProject = project;
    myModule = module;
    myCommandLine = commandLine;
  }

  @Override
  public void run(@NotNull Executor executor,
                  @Nullable VirtualFile workDirectory,
                  @Nullable Component component,
                  @NotNull Project project,
                  @Nullable AnActionEvent event) {
    runCommand(workDirectory, project, myCommandLine, myModule, executor);
  }

  /**
   * Records the command in the "undefined commands" history, rewrites it for the module's
   * Ruby SDK flavor (RVM / rbenv / plain ruby on PATH), wraps it in the platform shell,
   * and runs it in a console.
   */
  public static void runCommand(@Nullable VirtualFile workDirectory,
                                @NotNull Project project,
                                @NotNull String commandString,
                                @Nullable Module module,
                                @NotNull Executor executor) {
    Collection<String> commands = RunAnythingCache.getInstance(project).getState().undefinedCommands;
    // Remove-then-add moves an already-known command to the end of the history collection.
    commands.remove(commandString);
    commands.add(commandString);
    Sdk sdk = RModuleUtil.getInstance().findRubySdkForModule(module);
    GeneralCommandLine commandLine =
      new GeneralCommandLine().withParentEnvironmentType(GeneralCommandLine.ParentEnvironmentType.CONSOLE);
    String command = commandString;
    Map<String, String> env = ContainerUtil.newHashMap(commandLine.getEffectiveEnvironment());
    // Local SDKs only: remote interpreters cannot be patched through the local environment.
    if (sdk != null && !RubyRemoteInterpreterManager.getInstance().isRemoteSdk(sdk)) {
      if (RVMSupportUtil.isRVMInterpreter(sdk)) {
        command = getRVMAwareCommand(sdk, commandString, project);
      }
      else if (RbenvGemsetManager.isRbenvSdk(sdk)) {
        command = getRbenvAwareCommand(sdk, env, commandString, project, module);
      }
      else {
        command = getRubyAwareCommand(sdk, env, commandString);
      }
    }
    // When a shell is available the command is passed as a single argument to "<shell> -c"
    // (or "cmd /c" on Windows); otherwise the first token of the command becomes the exe path.
    List<String> shellCommand = getShellCommand();
    commandLine
      .withExePath(shellCommand.size() > 0 ? shellCommand.get(0) : ParametersListUtil.parse(command).get(0))
      .withParameters(shellCommand.size() == 2
                      ? ContainerUtil.immutableSingletonList(shellCommand.get(1))
                      : ContainerUtil.emptyList())
      .withParameters(command)
      .withEnvironment(env)
      .withWorkDirectory(RunAnythingItem.getActualWorkDirectory(project, workDirectory));
    runInConsole(commandLine, project);
  }

  /**
   * Starts the process and attaches it to a run-content console; prints an exit-code line
   * to the console when the process terminates. Errors are reported via a message dialog.
   */
  private static void runInConsole(@NotNull GeneralCommandLine commandLine, @NotNull Project project) {
    try {
      KillableColoredProcessHandler processHandler = new KillableColoredProcessHandler(commandLine) {
        @Override
        protected void notifyProcessTerminated(int exitCode) {
          RunContentDescriptor contentDescriptor = ExecutionManager.getInstance(project).getContentManager()
            .findContentDescriptor(DefaultRunExecutor.getRunExecutorInstance(), this);
          if (contentDescriptor != null && contentDescriptor.getExecutionConsole() instanceof ConsoleView) {
            ((ConsoleView)contentDescriptor.getExecutionConsole())
              .print(RBundle.message("run.anything.console.process.finished", exitCode),
                     ConsoleViewContentType.SYSTEM_OUTPUT);
          }
          super.notifyProcessTerminated(exitCode);
        }
      };
      final RunContentExecutor contentExecutor = new RunContentExecutor(project, processHandler)
        .withTitle(RBundle.message("run.anything.console.title"))
        .withStop(processHandler::destroyProcess, () -> !processHandler.isProcessTerminated())
        .withActivateToolWindow(true);
      ApplicationManager.getApplication().invokeLater(() -> {
        if (!project.isDisposed()) {
          contentExecutor.run();
        }
      });
    }
    catch (ExecutionException e) {
      Messages.showInfoMessage(project, e.getMessage(), RBundle.message("run.anything.console.error.title"));
    }
  }

  /**
   * @return {@code [shell, flag]} to wrap the command with ("cmd /c" on Windows, "$SHELL -c"
   * elsewhere), or an empty list when no usable shell is found.
   */
  @NotNull
  private static List<String> getShellCommand() {
    if (SystemInfoRt.isWindows) return ContainerUtil.immutableList(ExecUtil.getWindowsShellName(), "/c");

    String shell = System.getenv("SHELL");
    return shell == null || !new File(shell).canExecute()
           ? ContainerUtil.emptyList()
           : ContainerUtil.immutableList(shell, "-c");
  }

  /**
   * Prepends the SDK's bin directory to PATH in {@code env} so the plain command resolves
   * against the selected Ruby; the command text itself is returned unchanged.
   */
  private static String getRubyAwareCommand(@NotNull Sdk sdk, @NotNull Map<String, String> env, @NotNull String commandLine) {
    VirtualFile sdkHomeDirectory = sdk.getHomeDirectory();
    if (sdkHomeDirectory == null) return commandLine;

    VirtualFile parent = sdkHomeDirectory.getParent();
    if (parent == null) return commandLine;

    final String path = FileUtil.toSystemDependentName(parent.getPath());
    final String envName = OSUtil.getPathEnvVariableName();
    final String newPath = OSUtil.prependToPathEnvVariable(env.get(envName), path);
    env.put(envName, newPath);
    return commandLine;
  }

  /**
   * Rewrites the executable (first token) to its rbenv shim and patches the environment for
   * rbenv; falls back to the original command when no shim exists or rbenv runs are disabled.
   */
  private static String getRbenvAwareCommand(@NotNull Sdk sdk,
                                             @NotNull Map<String, String> env,
                                             @NotNull String commandLine,
                                             @NotNull Project project,
                                             @Nullable Module module) {
    String exeCommand = commandLine.contains(" ") ? StringUtil.substringBefore(commandLine, " ") : commandLine;
    String shimsExec = RbenvGemsetManager.getShimsCommandPath(Objects.requireNonNull(exeCommand));
    if (shimsExec == null || !RunAnythingCache.getInstance(project).CAN_RUN_RBENV) return commandLine;

    RubyAbstractRunner.patchRbenvEnv(env, module, sdk);
    return shimsExec + (commandLine.contains(" ") ? " " + StringUtil.substringAfter(commandLine, " ") : "");
  }

  /**
   * Wraps the command as {@code rvm <version>[@gemset] do <command>} so it runs under the
   * SDK's RVM ruby; returns the command unchanged for explicit "rvm " commands, unknown
   * versions, or when RVM runs are disabled.
   */
  @NotNull
  private static String getRVMAwareCommand(@NotNull Sdk sdk, @NotNull String commandLine, @NotNull Project project) {
    if (commandLine.startsWith("rvm ")) return commandLine;

    String version = RVMSupportUtil.getRVMSdkVersion(sdk);
    String gemset = RVMSupportUtil.getGemset(sdk);

    if (version == null) return commandLine;
    if (gemset != null) version += '@' + gemset;

    if (!RunAnythingCache.getInstance(project).CAN_RUN_RVM) return commandLine;

    return "rvm " + version + " do " + commandLine;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    RunAnythingUndefinedItem item = (RunAnythingUndefinedItem)o;
    return Objects.equals(myModule, item.myModule) &&
           Objects.equals(myCommandLine, item.myCommandLine) &&
           Objects.equals(myProject, item.myProject);
  }

  @Override
  public int hashCode() {
    return Objects.hash(myModule, myCommandLine, myProject);
  }

  @NotNull
  @Override
  public String getText() {
    return myCommandLine;
  }

  @NotNull
  @Override
  public Icon getIcon() {
    return UNDEFINED_COMMAND_ICON;
  }

  @NotNull
  @Override
  public String getValue() {
    return myCommandLine;
  }

  @NotNull
  @Override
  public Component getComponent(boolean isSelected) {
    return RunAnythingUtil.getUndefinedCommandCellRendererComponent(this, isSelected);
  }
}
package fi.nls.oskari.map.layer.formatters;

import fi.nls.oskari.domain.map.OskariLayer;
import fi.nls.oskari.domain.map.wfs.WFSLayerAttributes;
import fi.nls.oskari.domain.map.wfs.WFSLayerOptions;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.map.geometry.WKTHelper;
import fi.nls.oskari.service.ServiceException;
import fi.nls.oskari.service.capabilities.CapabilitiesConstants;
import fi.nls.oskari.util.JSONHelper;
import fi.nls.oskari.wfs.*;
import org.eclipse.emf.ecore.EObject;
import org.geotools.data.ResourceInfo;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.data.wfs.internal.WFSGetCapabilities;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.json.JSONArray;
import org.json.JSONObject;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.GeometryDescriptor;
import org.oskari.service.wfs3.WFS3Service;

import java.util.Collections;
import java.util.Set;

import static fi.nls.oskari.service.capabilities.CapabilitiesConstants.*;

/**
 * Builds the frontend-facing JSON description of a WFS layer, including style
 * information and capabilities metadata parsed from WFS 1.x/2.0 capabilities
 * documents or an OGC API Features (WFS3) service.
 */
public class LayerJSONFormatterWFS extends LayerJSONFormatter {

    private static Logger log = LogFactory.getLogger(LayerJSONFormatterWFS.class);

    /**
     * Builds the layer JSON: base fields from {@code getBaseJSON}, style names from
     * the layer options, the selected style name (or "default" when none is set on
     * the layer) and marks the layer queryable.
     */
    public JSONObject getJSON(OskariLayer layer,
                              final String lang,
                              final boolean isSecure,
                              final String crs) {
        final JSONObject layerJson = getBaseJSON(layer, lang, isSecure, crs);
        setStyles(layerJson, new WFSLayerOptions(layer.getOptions()));
        // Fall back to "default" when the maplayer row has no style configured.
        if(layer.getStyle() == null || layer.getStyle().isEmpty() ){
            JSONHelper.putValue(layerJson, KEY_STYLE, "default");
        } else {
            JSONHelper.putValue(layerJson, KEY_STYLE, layer.getStyle());
        }
        JSONHelper.putValue(layerJson, KEY_ISQUERYABLE, true);
        return layerJson;
    }

    /**
     * Writes the available style names from the layer options into the layer JSON.
     */
    protected void setStyles(JSONObject layerJson, WFSLayerOptions wfsOpts) {
        JSONHelper.putValue(layerJson, KEY_STYLES, wfsOpts.getStyles().names());
    }

    /**
     * Builds capabilities JSON from a parsed WFS GetCapabilities response
     * (supports both the 2.0 and 1.1 EMF capability models) and enriches it with
     * schema-derived geometry info, CRS list, coverage WKT and keywords taken from
     * the feature source.
     *
     * @throws ServiceException when the capabilities type is unrecognized or any
     *         enrichment step outside the inner try fails
     */
    public static JSONObject createCapabilitiesJSON(final WFSGetCapabilities capa,
                                                    SimpleFeatureSource source,
                                                    Set<String> systemCRSs) throws ServiceException {
        try {
            JSONObject json;
            EObject parsedCapa = capa.getParsedCapabilities();
            if (parsedCapa instanceof net.opengis.wfs20.WFSCapabilitiesType) {
                json = WFSCapabilitiesParser200.parse(
                        (net.opengis.wfs20.WFSCapabilitiesType) parsedCapa);
            } else if (parsedCapa instanceof net.opengis.wfs.WFSCapabilitiesType) {
                json = WFSCapabilitiesParser110.parse(
                        (net.opengis.wfs.WFSCapabilitiesType) parsedCapa);
            } else {
                throw new ServiceException("Invalid WFSCapabilitiesType");
            }
            // parse for version 1.x
            // TODO: 2.0.0 or newer doesn't work with this so content-editor etc will not work with those
            // Schema is used only to parse geometry property name
            // skip if failed to get schema or can't find default geometry property
            try {
                SimpleFeatureType sft = source.getSchema();
                JSONHelper.putValue(json, CapabilitiesConstants.KEY_NAMESPACE_URL,
                        sft.getName().getNamespaceURI());
                GeometryDescriptor geom = sft.getGeometryDescriptor();
                if (geom != null) {
                    JSONHelper.putValue(json, CapabilitiesConstants.KEY_GEOM_NAME, geom.getLocalName());
                }
                // TODO: else sft.getTypes().filter(known geom types)
            } catch (Exception e) {
                // Best-effort: schema parsing failures only lose geometry metadata.
                log.info("Unable to parse namespace url or geometry field name from schema:", e.getMessage());
            }
            ResourceInfo info = source.getInfo();
            // TODO is there more than default crs
            String crs = CRS.lookupIdentifier(info.getCRS(), true);
            if (crs != null) {
                Set<String> crss = getCRSsToStore(systemCRSs, Collections.singleton(crs));
                JSONHelper.put(json, KEY_SRS, new JSONArray(crss));
            }
            ReferencedEnvelope bbox = info.getBounds();
            if (bbox != null) {
                // Coverage is stored as a WGS84 bbox WKT.
                bbox = bbox.transform(WKTHelper.CRS_EPSG_4326, true);
                boolean coversWholeWorld =
                        bbox.getMinX() <= -180 &&
                        bbox.getMinY() <= -90 &&
                        bbox.getMaxX() >= 180 &&
                        bbox.getMaxY() >= 90;
                if (!coversWholeWorld) {
                    // no need to attach coverage if it covers the whole world as it's not useful info
                    String wkt = WKTHelper.getBBOX(bbox.getMinX(), bbox.getMinY(), bbox.getMaxX(), bbox.getMaxY());
                    JSONHelper.putValue(json,KEY_LAYER_COVERAGE, wkt);
                }
            }
            Set<String> keywords = info.getKeywords();
            JSONHelper.putValue(json, KEY_KEYWORDS, new JSONArray(keywords));
            return json;
        } catch (Exception e) {
            // NOTE(review): the cause is dropped here (only the message survives),
            // which loses the stack trace — consider passing `e` as cause.
            throw new ServiceException(
                    "Failed to create capabilities json: " + e.getMessage());
        }
    }

    /**
     * Builds capabilities JSON for a single collection of an OGC API Features
     * (WFS3) service: supported CRS URIs, EPSG codes filtered against the
     * system CRSs, and supported output formats.
     */
    public static JSONObject createCapabilitiesJSON (WFS3Service service, String collectionId, Set<String> systemCRSs) {
        JSONObject capabilities = new JSONObject();
        // override
        Set<String> crsUri = service.getSupportedCrsURIs(collectionId);
        JSONHelper.put(capabilities, KEY_CRS_URI, new JSONArray(crsUri));

        Set<String> capabilitiesCRSs = service.getSupportedEpsgCodes(collectionId);
        Set<String> crss = getCRSsToStore(systemCRSs, capabilitiesCRSs);
        JSONHelper.putValue(capabilities, KEY_SRS, new JSONArray(crss));

        Set<String> formats = service.getSupportedFormats(collectionId);
        JSONHelper.put(capabilities, KEY_FEATURE_OUTPUT_FORMATS, new JSONArray(formats));
        return capabilities;
    }
}
package fi.nls.oskari.map.myplaces.service;

import fi.nls.oskari.domain.User;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.map.data.service.GetGeoPointDataService;
import fi.nls.oskari.map.myplaces.domain.ProxyRequest;
import fi.nls.oskari.permission.PermissionException;
import fi.nls.oskari.util.IOHelper;
import fi.nls.oskari.util.PropertyUtil;
import org.apache.xerces.parsers.DOMParser;
import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.safety.Whitelist;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import javax.xml.xpath.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.util.zip.GZIPInputStream;

/**
 * Proxies "my places" WFS requests to a GeoServer instance, enforcing that the
 * UUID embedded in POSTed WFS transactions matches the logged-in user, and builds
 * GetFeatureInfo-style queries for my-places features.
 *
 * NOTE(review): the incoming XML is parsed with a plain Xerces DOMParser without
 * disabling DTDs/external entities — since the POST body is user-supplied this
 * looks XXE-susceptible; verify and harden the parser configuration.
 */
public class GeoServerProxyService {

    private final static Logger log = LogFactory.getLogger(GeoServerProxyService.class);

    // Element/attribute names looked up in the WFS transaction / response DOM.
    private final static String WFS_INSERT = "wfs:Insert";
    private final static String WFS_DELETE = "wfs:Delete";
    private final static String WFS_UPDATE = "wfs:Update";
    private final static String WFS_FEATURECOLLECTION = "wfs:FeatureCollection";
    private final static String FEATURE_UUID = "feature:uuid";
    private final static String OGC_FEATURE_ID = "ogc:FeatureId";
    private final static String FID = "fid";
    private final static String OGC_LITERAL = "ogc:Literal";
    private final static String OWS_UUID = "ows:uuid";

    // Classpath resources: GetFeatureInfo query template and its XSL transform.
    private static final String MY_PLACE_FEATURE_FILTER_XML = "GetFeatureInfoMyPlaces.xml";
    private static final String MY_PLACE_FEATURE_FILTER_XSL = "GetFeatureInfoMyPlaces.xsl";
    private static final String POST_REQUEST = "POST";
    private static final String XML_VERSION_TAG= "<?xml version=\"1.0\"?>\r\n";
    private static final String GEOSERVER_URL_PARAMS =
            "service=WFS&version=1.0.0&request=GetFeature&maxFeatures=50"+
            "&outputFormat=text/xml;%20subtype=gml/3.1.1&typeName=ows:";
    private static final String URLPARAM_FEATURE = "&FEATUREID=";

    // Search radius = DISTANCE_FACTOR * 2^(MAX_ZOOM_LEVEL - zoom); see buildQueryToStream.
    private static final int DISTANCE_FACTOR = 5;
    private static final int MAX_ZOOM_LEVEL = 12;

    /** Connection to the configured my-places OWS endpoint ("myplaces.ows.url"). */
    private static HttpURLConnection getConnection() throws IOException {
        final String myPlacesUrl = PropertyUtil.get("myplaces.ows.url");
        return getConnection(myPlacesUrl);
    }

    /** Connection to {@code url} authenticated with the my-places GeoServer credentials. */
    private static HttpURLConnection getConnection(final String url) throws IOException {
        // myplaces needs geoserver auth
        final String myplacesUser = PropertyUtil.get("myplaces.user");
        final String myplacesUserPass = PropertyUtil.get("myplaces.password");
        return IOHelper.getConnection(url, myplacesUser, myplacesUserPass);
    }

    /**
     * Forwards {@code request} to its target URL and returns the response body.
     * For POST requests the UUID inside the posted WFS XML must match the current
     * user's UUID, otherwise a {@link PermissionException} is thrown.
     *
     * NOTE(review): the POST branch always wraps the response in GZIPInputStream —
     * this assumes the server always gzips POST responses; confirm, otherwise a
     * plain response would fail here.
     */
    public String proxy(final ProxyRequest request, final User user)
            throws MalformedURLException, IOException, PermissionException {
        // check that the users UUID matches the one in POST data XML
        if (POST_REQUEST.equals(request.getMethod())) {
            final StringBuffer sb = new StringBuffer();
            sb.append(XML_VERSION_TAG);
            sb.append(request.getPostData());
            log.debug("Posted XML:", sb.toString());
            final String uuid = Jsoup.clean(getUUIDfromXml(sb), Whitelist.none());
            if (!uuid.equals(user.getUuid())) {
                throw new PermissionException("UUID didn't match with XML");
            }
        }
        final HttpURLConnection con = IOHelper.getConnection(request.getUrl()
                + request.getParamsAsQueryString(), request.getUserName(), request.getPassword());
        IOHelper.writeHeaders(con, request.getHeaders());
        try {
            if (POST_REQUEST.equals(request.getMethod())) {
                HttpURLConnection.setFollowRedirects(false);
                con.setUseCaches(false);
                con.setRequestProperty("Content-type", "text/xml; charset=UTF-8");
                IOHelper.writeToConnection(con, request.getPostData());
                GZIPInputStream gis = new GZIPInputStream(con.getInputStream());
                return IOHelper.readString(gis);
            } else {
                return IOHelper.readString(con.getInputStream());
            }
        } finally {
            con.disconnect();
        }
    }

    /**
     * Extracts the owner UUID from a WFS transaction document:
     * - Insert: read it from the feature:uuid element,
     * - Delete/Update: resolve the feature id against GeoServer (ows:uuid lookup),
     * - otherwise: fall back to the first ogc:Literal value.
     * Returns "" when none of the branches match.
     */
    private static String getUUIDfromXml(final StringBuffer sb) throws IOException {
        InputStream is = new ByteArrayInputStream(sb.toString().getBytes("UTF-8"));
        InputSource inputSource = new InputSource(is);
        DOMParser p = new DOMParser();
        try {
            p.parse(inputSource);
        } catch (SAXException e) {
            throw new IOException("Error parsing XML", e);
        }finally {
            is.close();
        }
        Document doc = p.getDocument();
        String uuidInXml = "";
        NodeList insertCommandNode = doc.getElementsByTagName(WFS_INSERT);
        NodeList deleteCommandNode = doc.getElementsByTagName(WFS_DELETE);
        NodeList updateCommandNode = doc.getElementsByTagName(WFS_UPDATE);
        if (insertCommandNode.getLength() > 0) {
            // Inserts carry the owner's uuid directly in the feature payload.
            NodeList uuidNode = doc.getElementsByTagName(FEATURE_UUID);
            uuidInXml = uuidNode.item(0).getTextContent();
            uuidInXml = Jsoup.clean(uuidInXml, Whitelist.none());
        } else if (deleteCommandNode.getLength() > 0
                || updateCommandNode.getLength() > 0) {
            // Delete/update only reference a feature id -> ask GeoServer who owns it.
            NodeList featureIdNode = doc.getElementsByTagName(OGC_FEATURE_ID);
            String featureId = featureIdNode.item(0).getAttributes()
                    .getNamedItem(FID).getTextContent();
            featureId = Jsoup.clean(featureId, Whitelist.none());
            try {
                uuidInXml = getUuidFromGeoserver(featureId);
            } catch (SAXException e) {
                throw new IOException("Error parsing XML", e);
            }
            uuidInXml = Jsoup.clean(uuidInXml, Whitelist.none());
        } else if (doc.getElementsByTagName(OGC_LITERAL).getLength() > 0) {
            uuidInXml = doc.getElementsByTagName(OGC_LITERAL).item(0)
                    .getTextContent();
            uuidInXml = Jsoup.clean(uuidInXml, Whitelist.none());
        }
        return uuidInXml;
    }

    /**
     * Fetches the feature identified by {@code featureId} from GeoServer and
     * returns the ows:uuid value of the first match (sanitized to plain text).
     */
    public static String getUuidFromGeoserver(String featureId) throws IOException, SAXException {
        String uuidInXml = "";
        InputStream is2 = null;
        HttpURLConnection geoserverCon = getGeoserverConnection(featureId);
        if (geoserverCon == null)
            throw new IOException("Could not get connection to GeoServer");
        try{
            is2 = geoserverCon.getInputStream();
            DOMParser p2 = new DOMParser();
            InputSource inputSource2 = new InputSource(is2);
            p2.parse(inputSource2);
            Document docu = p2.getDocument();
            NodeList responseUUIDNode = docu.getElementsByTagName(OWS_UUID);
            uuidInXml = responseUUIDNode.item(0).getTextContent();
            uuidInXml = Jsoup.clean(uuidInXml, Whitelist.none());
        }finally {
            geoserverCon.disconnect();
            if (is2 != null ) is2.close();
        }
        return uuidInXml;
    }

    /**
     * Builds a GetFeature connection for {@code featureId}. The feature type is
     * the part of the id before the first '.', e.g. "categories.123" -> "categories".
     *
     * NOTE(review): reads property "myPlacesUrl" while getConnection() reads
     * "myplaces.ows.url" — confirm both keys are intentionally different.
     */
    public static HttpURLConnection getGeoserverConnection(String featureId)
            throws IOException, SAXException {
        featureId = Jsoup.clean(featureId, Whitelist.none());
        int dotIdx = featureId.indexOf('.');
        if (dotIdx < 0)
            throw new IOException("Could not connect. No feature type in '" + featureId + "'");
        String typeName = featureId.substring(0, dotIdx);
        String myPlacesUrl = PropertyUtil.get("myPlacesUrl");
        myPlacesUrl = Jsoup.clean(myPlacesUrl, Whitelist.none());
        String geoserverAddress = myPlacesUrl + GEOSERVER_URL_PARAMS + typeName
                + URLPARAM_FEATURE + featureId;
        return getConnection(geoserverAddress);
    }

    /**
     * Queries my-places features near (lon, lat) for the given layer id and user
     * uuid, transforms the WFS response with the XSL template and wraps it in a
     * GetGeoPointDataService-style JSON envelope. Returns null on any error or
     * when the response contains no features (numberOfFeatures == "0").
     */
    public JSONObject getFeatureInfo(final double lat, final double lon,
            final int zoom, final String id, final String uuid) {
        HttpURLConnection connection = null;
        InputStream respInStream = null;
        InputStream xsltInStream = null;
        try {
            // Layer id is of the form "<prefix>_<categoryId>"; take the suffix.
            final String categoryId = id.substring(id.indexOf("_")+1);
            connection = getConnection();
            connection.setDoOutput(true);
            connection.setRequestProperty("Content-type", "application/xml");
            OutputStream outs = connection.getOutputStream();
            buildQueryToStream(MY_PLACE_FEATURE_FILTER_XML, lon +" " +lat, zoom, categoryId, uuid, outs);
            outs.flush();
            outs.close();
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setValidating(false);
            factory.setNamespaceAware(true);
            DocumentBuilder builder = factory.newDocumentBuilder();
            respInStream = connection.getInputStream();
            org.w3c.dom.Document document = builder.parse(respInStream);
            xsltInStream = this.getClass().getResourceAsStream(MY_PLACE_FEATURE_FILTER_XSL);
            StreamSource stylesource = new StreamSource(xsltInStream);
            String nof = document.getElementsByTagName(WFS_FEATURECOLLECTION)
                    .item(0).getAttributes().getNamedItem("numberOfFeatures").getTextContent();
            if (!"0".equals(nof)) {
                String transformedResponse = GetGeoPointDataService.getFormatedJSONString(document, stylesource);
                JSONObject response = new JSONObject();
                response.put(GetGeoPointDataService.TYPE, "wmslayer");
                response.put(GetGeoPointDataService.LAYER_ID, id);
                response.put(GetGeoPointDataService.PRESENTATION_TYPE, "JSON");
                response.put(GetGeoPointDataService.CONTENT, new JSONObject(transformedResponse));
                return response;
            }
        } catch (IOException e) {
            log.error("IOException when trying do wfs query for my places", e);
        } catch (XPathExpressionException e) {
            log.error("XPathExpressionException when trying do wfs query for my places", e);
        } catch (ParserConfigurationException e) {
            log.error("ParserConfigurationException when trying do wfs query for my places", e);
        } catch (SAXException e) {
            log.error("SAXException when trying do wfs query for my places", e);
        } catch (TransformerException e) {
            log.error("TransformerException when trying do wfs query for my places", e);
        } catch (JSONException e) {
            log.error("JSONException when trying do wfs query for my places", e);
        } finally {
            try{
                if(respInStream != null) respInStream.close();
            } catch ( IOException ex) {
                log.warn("Unable to close InputStream ", ex);
            }
            try {
                if(xsltInStream != null) xsltInStream.close();
            } catch (IOException ex) {
                log.warn("Unable to close InputStream ", ex);
            }
            if(connection != null){
                connection.disconnect();
            }
        }
        return null;
    }

    /**
     * Fills the GetFeatureInfo query template (classpath resource
     * {@code resourceName}) by replacing the {LON_LAT}, {CATEGORY_ID}, {UUID} and
     * {DISTANCE} placeholder nodes via XPath, then serializes the document into
     * {@code outs}. Distance scales with zoom: DISTANCE_FACTOR * 2^(MAX_ZOOM_LEVEL - zoomLevel).
     *
     * NOTE(review): the return value is {@code outs.toString()} which for a plain
     * OutputStream is the object's identity string, not the written XML; callers
     * here ignore the return value, so this looks like dead/misleading code.
     */
    private String buildQueryToStream(String resourceName, String lon_lat,
            int zoomLevel, String categoryId, String uuid, OutputStream outs)
            throws ParserConfigurationException, SAXException, IOException,
            XPathExpressionException, TransformerException {

        MyPlacesNamespaceContext nscontext = new MyPlacesNamespaceContext();

        // 1) Read the query template from the classpath.
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        DocumentBuilder builder;
        Document doc = null;
        builder = factory.newDocumentBuilder();
        InputStream inp = this.getClass().getResourceAsStream(resourceName);
        doc = builder.parse(inp);
        inp.close();

        // 2) Compile one XPath per placeholder (namespace-aware).
        XPathFactory xFactory = XPathFactory.newInstance();
        XPath xpath = xFactory.newXPath();
        xpath.setNamespaceContext(nscontext);
        XPath xpath2 = xFactory.newXPath();
        xpath2.setNamespaceContext(nscontext);
        XPath xpath3 = xFactory.newXPath();
        xpath3.setNamespaceContext(nscontext);
        XPath xpath4 = xFactory.newXPath();
        xpath4.setNamespaceContext(nscontext);

        // 3) Replace each placeholder node's text with the actual value.
        XPathExpression expr = xpath
                .compile("//gml:pos[.='{LON_LAT}']");
        Node nd = (Node) expr.evaluate(doc, XPathConstants.NODE);
        nd.setTextContent(lon_lat);

        XPathExpression expr2 = xpath2
                .compile("//ogc:Literal[.='{CATEGORY_ID}']");
        Node nd2 = (Node) expr2.evaluate(doc, XPathConstants.NODE);
        nd2.setTextContent(categoryId);

        XPathExpression expr3 = xpath3
                .compile("//ogc:Literal[.='{UUID}']");
        Node nd3 = (Node) expr3.evaluate(doc, XPathConstants.NODE);
        nd3.setTextContent(uuid);

        XPathExpression expr4 = xpath4
                .compile("//ogc:Distance[.='{DISTANCE}']");
        Node nd4 = (Node) expr4.evaluate(doc, XPathConstants.NODE);
        nd4.setTextContent(String.valueOf(DISTANCE_FACTOR*Math.pow(2,(MAX_ZOOM_LEVEL-zoomLevel))));

        // 4) Serialize the filled template into the caller's stream.
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer transformer = tFactory.newTransformer();
        DOMSource source = new DOMSource(doc);
        StreamResult result = new StreamResult(outs);
        transformer.transform(source, result);

        return outs.toString();
    }
}
package fi.nls.oskari.spring.security; import fi.nls.oskari.log.LogFactory; import fi.nls.oskari.log.Logger; import fi.nls.oskari.spring.SpringEnvHelper; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.core.annotation.Order; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; /** * Common security config for logging out. */ @Configuration @EnableWebSecurity @Order(2) public class OskariCommonSecurityConfig extends WebSecurityConfigurerAdapter { private Logger log = LogFactory.getLogger(OskariCommonSecurityConfig.class); @Autowired private SpringEnvHelper env; protected void configure(HttpSecurity http) throws Exception { log.info("Configuring common security options"); final String logoutUrl = env.getLogoutUrl(); http .headers().frameOptions().disable() .and() // IMPORTANT! Only antMatch for logoutUrl, otherwise SAML security filters are passed even if active // also server-extensions with custom config are affected // FIXME: When we want to use SAML singleLogout, we should disable this and call /saml/SingleLogout .antMatcher(logoutUrl) // NOTE! With CSRF enabled logout needs to happen with POST request .logout() .logoutUrl(logoutUrl) .invalidateHttpSession(true) .deleteCookies("oskaristate","JSESSIONID", "CSRF-TOKEN") .logoutSuccessUrl("/"); } }
package org.slc.sli.ingestion.model;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

import org.slc.sli.ingestion.BatchJobStageType;
import org.slc.sli.util.performance.PutResultInContext;

/**
 * Model for ingestion jobs.
 *
 * @author dduran
 *
 */
@Document
public final class NewBatchJob {

    @Id
    private String id;
    private String sourceId;
    private String status;
    private int totalFiles;
    private Map<String, String> batchProperties;
    private List<Stage> stages;
    private List<ResourceEntry> resourceEntries;

    // mongoTemplate requires this constructor.
    public NewBatchJob() {
    }

    /**
     * Creates a fresh job with the given id and empty collections.
     *
     * Fix: batchProperties is now initialized here as well — previously it was
     * left null, so getProperty()/getBatchProperties() on a freshly constructed
     * job threw a NullPointerException while stages/resourceEntries were safe.
     *
     * @param id unique job id, e.g. from {@link #createId(String)}
     */
    public NewBatchJob(String id) {
        this.id = id;
        this.batchProperties = new HashMap<String, String>();
        this.stages = new LinkedList<Stage>();
        this.resourceEntries = new LinkedList<ResourceEntry>();
    }

    /**
     * Full constructor; null collections are replaced with empty ones so the
     * accessors never have to null-check.
     */
    public NewBatchJob(String id, String sourceId, String status, int totalFiles,
            Map<String, String> batchProperties, List<Stage> stages, List<ResourceEntry> resourceEntries) {
        super();
        this.id = id;
        this.sourceId = sourceId;
        this.status = status;
        this.totalFiles = totalFiles;
        if (batchProperties == null) {
            batchProperties = new HashMap<String, String>();
        }
        this.batchProperties = batchProperties;
        if (stages == null) {
            stages = new LinkedList<Stage>();
        }
        this.stages = stages;
        if (resourceEntries == null) {
            resourceEntries = new LinkedList<ResourceEntry>();
        }
        this.resourceEntries = resourceEntries;
    }

    /**
     * generates a new unique ID: "[filename-]currentMillis-randomUUID".
     */
    @PutResultInContext(returnName = "ingestionBatchJobId")
    public static String createId(String filename) {
        if (filename == null) {
            return System.currentTimeMillis() + "-" + UUID.randomUUID().toString();
        } else {
            return filename + "-" + System.currentTimeMillis() + "-" + UUID.randomUUID().toString();
        }
    }

    public String getSourceId() {
        return sourceId;
    }

    public void setSourceId(String sourceId) {
        this.sourceId = sourceId;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public Map<String, String> getBatchProperties() {
        return batchProperties;
    }

    /** Returns the batch property for {@code key}, or null when absent. */
    public String getProperty(String key) {
        return batchProperties.get(key);
    }

    /** Returns the batch property for {@code key}, or {@code defaultValue} when absent. */
    public String getProperty(String key, String defaultValue) {
        String value = batchProperties.get(key);
        if (value == null) {
            value = defaultValue;
        }
        return value;
    }

    public void setBatchProperties(Map<String, String> batchProperties) {
        this.batchProperties = batchProperties;
    }

    public List<Stage> getStages() {
        return stages;
    }

    public List<ResourceEntry> getResourceEntries() {
        return resourceEntries;
    }

    public String getId() {
        return id;
    }

    /**
     * Method to return the ResourceEntry for a given resourceId
     * returns null if no matching entry is found
     *
     * @param resourceId must not be null
     * @throws IllegalArgumentException when resourceId is null
     */
    public ResourceEntry getResourceEntry(String resourceId) {
        if (resourceId != null) {
            for (ResourceEntry entry : this.getResourceEntries()) {
                if (resourceId.equals(entry.getResourceId())) {
                    return entry;
                }
            }
        } else {
            throw new IllegalArgumentException("Cannot get resource for null resourceId");
        }
        return null;
    }

    /**
     * Method to return the List of metrics for a given stageType
     * returns null if no matching metrics are found
     *
     * @param stageType stage to look up by name
     */
    public List<Metrics> getStageMetrics(BatchJobStageType stageType) {
        for (Stage stage : this.getStages()) {
            if (stageType.getName().equals(stage.getStageName())) {
                return stage.getMetrics();
            }
        }
        return null;
    }
}
package gov.nih.nci.cananolab.ui.core;

import gov.nih.nci.cananolab.dto.common.AccessibilityBean;
import gov.nih.nci.cananolab.exception.BaseException;
import gov.nih.nci.cananolab.exception.InvalidSessionException;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.exception.SecurityException;
import gov.nih.nci.cananolab.service.security.SecurityService;
import gov.nih.nci.cananolab.service.security.UserBean;
import gov.nih.nci.cananolab.util.Constants;
import gov.nih.nci.cananolab.util.StringUtils;

import java.util.Enumeration;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.actions.DispatchAction;

/**
 * Base Struts dispatch action that validates the "dispatch"/"page" request
 * parameters, enforces session validity and checks access before delegating
 * to the dispatched method.
 */
public abstract class AbstractDispatchAction extends DispatchAction {

	/**
	 * Validates request parameters, session state and user access; only then
	 * dispatches to the concrete action method via super.execute().
	 *
	 * @throws BaseException on malformed dispatch/page parameters
	 * @throws InvalidSessionException when a private dispatch hits a new session
	 * @throws NoAccessException when the user may not run the dispatch
	 */
	public ActionForward execute(ActionMapping mapping, ActionForm form,
			HttpServletRequest request, HttpServletResponse response)
			throws Exception {
		HttpSession session = request.getSession();
		String dispatch = (String) request.getParameter("dispatch");
		String page = request.getParameter("page");
		// per app scan, validate dispatch and page parameters
		if (page != null && !page.matches(Constants.NUMERIC_PATTERN)) {
			throw new BaseException("Invalid value for the page parameter");
		}
		if (dispatch == null) {
			throw new BaseException("The dispatch parameter can not be null");
		}
		// NOTE(review): the null check below is redundant — a null dispatch has
		// already thrown above.
		if (dispatch != null && !dispatch.matches(Constants.STRING_PATTERN)) {
			throw new BaseException("Invalid value for the dispatch parameter");
		}
		UserBean user = (UserBean) session.getAttribute("user");
		// private dispatch and session expired
		// NOTE(review): despite its name, isDispatchPublic() matches against
		// Constants.PRIVATE_DISPATCHES — so 'privateDispatch' here is true for
		// dispatches on the private list. Confirm intent; the method name and
		// its use read inverted.
		boolean privateDispatch = isDispatchPublic(dispatch);
		if (session.isNew() && (dispatch == null || privateDispatch)) {
			throw new InvalidSessionException();
		}
		// if dispatched methods require validation but page=0 throw error
		if (dispatchWithValidation(dispatch)
				&& (page == null || Integer.parseInt(page) <= 0)) {
			throw new BaseException(
					"The value for the page parameter is invalid for the given dispatch");
		}
		// Identify the protected entity in priority order: sample, publication, protocol.
		String protectedData = request.getParameter("sampleId");
		if (protectedData == null) {
			protectedData = request.getParameter("publicationId");
		}
		if (protectedData == null) {
			protectedData = request.getParameter("protocolId");
		}
		boolean executeStatus = canUserExecute(request, dispatch, protectedData);
		if (executeStatus) {
			return super.execute(mapping, form, request, response);
		} else {
			if (user == null) {
				throw new NoAccessException("Log in is required");
			}
			// Access denied for a logged-in user: drop the session user before failing.
			request.getSession().removeAttribute("user");
			throw new NoAccessException();
		}
	}

	/**
	 * Check whether the current user can execute the action with the specified
	 * dispatch
	 *
	 * @param user
	 * @return
	 * @throws SecurityException
	 */
	public boolean canUserExecute(HttpServletRequest request, String dispatch,
			String protectedData) throws Exception {
		UserBean user = (UserBean) request.getSession().getAttribute("user");
		// private dispatch in public actions
		// (see NOTE in execute(): isDispatchPublic() matches PRIVATE_DISPATCHES)
		boolean privateDispatch = isDispatchPublic(dispatch);
		if (!privateDispatch) {
			return true;
		} else if (user == null && privateDispatch) {
			return false;
		} else {
			return canUserExecutePrivateDispatch(request, protectedData);
		}
	}

	/**
	 * Default access check for private dispatches: requires a logged-in user.
	 * Subclasses may override to add entity-level checks on protectedData.
	 */
	public Boolean canUserExecutePrivateDispatch(HttpServletRequest request,
			String protectedData) throws Exception {
		UserBean user = (UserBean) request.getSession().getAttribute("user");
		if (user == null) {
			return false;
		}
		return true;
	}

	// NOTE(review): name is misleading — returns true when dispatch starts with
	// an entry of Constants.PRIVATE_DISPATCHES. Renaming would break callers, so
	// only flagging it here.
	public boolean isDispatchPublic(String dispatch) {
		if (dispatch != null) {
			for (String theDispatch : Constants.PRIVATE_DISPATCHES) {
				if (dispatch.startsWith(theDispatch)) {
					return true;
				}
			}
		}
		return false;
	}

	// True when the dispatch is listed in Constants.DISPATCHES_WITH_VALIDATIONS
	// (prefix match) and therefore requires a positive "page" parameter.
	private boolean dispatchWithValidation(String dispatch) {
		if (dispatch != null) {
			for (String theDispatch : Constants.DISPATCHES_WITH_VALIDATIONS) {
				if (dispatch.startsWith(theDispatch)) {
					return true;
				}
			}
		}
		return false;
	}

	/**
	 * Get the page number used in display tag library pagination
	 *
	 * @param request
	 * @return zero-based page index parsed from the display-tag "d-...-p" parameter
	 */
	public int getDisplayPage(HttpServletRequest request) {
		int page = 0;
		Enumeration paramNames = request.getParameterNames();
		while (paramNames.hasMoreElements()) {
			String name = (String) paramNames.nextElement();
			if (name != null && name.startsWith("d-") && name.endsWith("-p")) {
				String pageValue = request.getParameter(name);
				if (pageValue != null) {
					page = Integer.parseInt(pageValue) - 1;
				}
			}
		}
		return page;
	}

	public String getBrowserDispatch(HttpServletRequest request) {
		String dispatch = request.getParameter("dispatch");
		// get the dispatch value from the URL in the browser address bar
		// used in case of validation
		if (dispatch != null
				&& request.getAttribute("javax.servlet.forward.query_string") != null) {
			String browserQueryString = request.getAttribute(
					"javax.servlet.forward.query_string").toString();
			if (!StringUtils.isEmpty(browserQueryString)) {
				String browserDispatch = browserQueryString.replaceAll(
						"dispatch=(.+)&(.+)", "$1");
				return browserDispatch;
			}
		}
		return "";
	}

	/**
	 * Retrieve a value from request by name in the order of Parameter - Request
	 * Attribute - Session Attribute
	 *
	 * @param request
	 * @param name
	 * @return
	 */
	public Object getValueFromRequest(HttpServletRequest request, String name) {
		Object value = request.getParameter(name);
		if (value == null) {
			value = request.getAttribute(name);
		}
		if (value == null) {
			value = request.getSession().getAttribute(name);
		}
		return value;
	}

	// Returns the session's SecurityService; when none is cached yet, builds a
	// new one from the "user" request attribute (NOTE(review): the cached path
	// uses the session attribute, the fresh path reads request.getAttribute("user")
	// — confirm the asymmetry is intentional).
	protected SecurityService getSecurityServiceFromSession(
			HttpServletRequest request) throws Exception {
		if (request.getSession().getAttribute("securityService") == null) {
			UserBean user = (UserBean) request.getAttribute("user");
			SecurityService service = new SecurityService(
					AccessibilityBean.CSM_APP_NAME, user);
			return service;
		}
		SecurityService securityService = (SecurityService) request
				.getSession().getAttribute("securityService");
		return securityService;
	}
}
package com.intellij.psi.impl.source.resolve.reference.impl.providers;

import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider;
import com.intellij.codeInsight.daemon.JavaErrorMessages;
import com.intellij.codeInsight.daemon.QuickFixProvider;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.LocalQuickFixProvider;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.reference.ProcessorRegistry;
import com.intellij.psi.impl.source.resolve.reference.impl.GenericReference;
import com.intellij.psi.scope.BaseScopeProcessor;
import com.intellij.psi.scope.PsiConflictResolver;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.scope.conflictResolvers.DuplicateConflictResolver;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * One segment of a file-path reference (e.g. one component of "dir/sub/file");
 * the whole path is owned by a {@link FileReferenceSet}, which holds one
 * FileReference per segment, indexed by {@code myIndex}.
 *
 * @author cdr
 */
public class FileReference implements PsiPolyVariantReference, QuickFixProvider<FileReference>, LocalQuickFixProvider, EmptyResolveMessageProvider {
  public static final FileReference[] EMPTY = new FileReference[0];
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReference");

  // Position of this segment within the owning FileReferenceSet.
  private final int myIndex;
  // Range of this segment inside the host element; mutated on rename (see handleElementRename).
  private TextRange myRange;
  // Raw text of this segment.
  private final String myText;
  @NotNull private final FileReferenceSet myFileReferenceSet;
  // Lazily computed resolve results; reset only by clearResolveCaches().
  private ResolveResult[] myCachedResult;

  public FileReference(final @NotNull FileReferenceSet fileReferenceSet, TextRange range, int index, String text) {
    myFileReferenceSet = fileReferenceSet;
    myIndex = index;
    myRange = range;
    myText = text;
  }

  /**
   * The file-system items this segment is resolved against: the set's default
   * contexts for the first segment, otherwise whatever the previous segment
   * resolved to.
   */
  @NotNull
  private Collection<PsiFileSystemItem> getContexts() {
    final FileReference contextRef = getContextReference();
    if (contextRef == null) {
      return myFileReferenceSet.getDefaultContexts();
    }
    ResolveResult[] resolveResults = contextRef.multiResolve(false);
    ArrayList<PsiFileSystemItem> result = new ArrayList<PsiFileSystemItem>();
    for (ResolveResult resolveResult : resolveResults) {
      result.add((PsiFileSystemItem)resolveResult.getElement());
    }
    return result;
  }

  @NotNull
  public ResolveResult[] multiResolve(final boolean incompleteCode) {
    // NOTE(review): 'incompleteCode' is ignored; the cached result is reused
    // for both modes until clearResolveCaches() is called.
    if (myCachedResult == null) {
      myCachedResult = innerResolve();
    }
    return myCachedResult;
  }

  /** Resolve this segment's text against every available context. */
  protected ResolveResult[] innerResolve() {
    final String referenceText = getText();
    final Collection<PsiFileSystemItem> contexts = getContexts();
    final Collection<ResolveResult> result = new ArrayList<ResolveResult>(contexts.size());
    for (final PsiFileSystemItem context : contexts) {
      innerResolveInContext(referenceText, context, result);
    }
    final int resultCount = result.size();
    return resultCount > 0 ? result.toArray(new ResolveResult[resultCount]) : ResolveResult.EMPTY_ARRAY;
  }

  /**
   * Resolve {@code text} against a single context:
   * empty text (when a trailing slash is allowed on the last segment), "." and
   * "/" resolve to the context itself; ".." resolves to its parent; text
   * containing '/' is split and resolved recursively; otherwise children are
   * matched by name, honoring the set's case sensitivity.
   */
  private void innerResolveInContext(final String text, final PsiFileSystemItem context, final Collection<ResolveResult> result) {
    if (text.length() == 0 && !myFileReferenceSet.isEndingSlashNotAllowed() && isLast() || ".".equals(text) || "/".equals(text)) {
      result.add(new PsiElementResolveResult(context));
    }
    else if ("..".equals(text)) {
      final PsiFileSystemItem resolved = context.getParent();
      if (resolved != null) {
        result.add(new PsiElementResolveResult(resolved));
      }
    }
    else {
      final int separatorIndex = text.indexOf('/');
      if (separatorIndex >= 0) {
        // multi-segment text: resolve the head, then the rest within each head result
        final List<ResolveResult> resolvedContexts = new ArrayList<ResolveResult>();
        innerResolveInContext(text.substring(0, separatorIndex), context, resolvedContexts);
        final String restOfText = text.substring(separatorIndex + 1);
        for (ResolveResult contextVariant : resolvedContexts) {
          innerResolveInContext(restOfText, (PsiFileSystemItem)contextVariant.getElement(), result);
        }
      }
      else {
        processVariants(context, new BaseScopeProcessor() {
          public boolean execute(final PsiElement element, final PsiSubstitutor substitutor) {
            final String name = ((PsiFileSystemItem)element).getName();
            // stop processing (return false) on the first name match
            if (myFileReferenceSet.isCaseSensitive() ? text.equals(name) : text.compareToIgnoreCase(name) == 0) {
              result.add(new PsiElementResolveResult(element));
              return false;
            }
            return true;
          }
        });
      }
    }
  }

  /**
   * Completion variants: all children of every context that are files or one
   * of the helpers' directory classes. NOTE(review): uses a raw List — cleanup
   * candidate, kept as-is here.
   */
  public Object[] getVariants() {
    final String s = getText();
    if (s != null && s.equals("/")) {
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }
    try {
      final List ret = new ArrayList();
      final List<Class> allowedClasses = new ArrayList<Class>();
      allowedClasses.add(PsiFile.class);
      for (final FileReferenceHelper helper : getHelpers()) {
        allowedClasses.add(helper.getDirectoryClass());
      }
      final List<PsiConflictResolver> resolvers = Arrays.<PsiConflictResolver>asList(new DuplicateConflictResolver());
      final PsiElementProcessor<PsiFileSystemItem> processor = createChildrenProcessor(myFileReferenceSet.createProcessor(ret, allowedClasses, resolvers));
      for (PsiFileSystemItem context : getContexts()) {
        for (final PsiElement child : context.getChildren()) {
          if (child instanceof PsiFileSystemItem) {
            processor.execute((PsiFileSystemItem)child);
          }
        }
      }
      return ret.toArray();
    }
    catch (ProcessorRegistry.IncompatibleReferenceTypeException e) {
      LOG.error(e);
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }
  }

  private void processVariants(final PsiFileSystemItem context, final PsiScopeProcessor processor) {
    context.processChildren(createChildrenProcessor(processor));
  }

  /**
   * Wraps a scope processor so that non-directory virtual files are first
   * re-wrapped as PsiFiles via the PSI manager before being offered.
   */
  private PsiElementProcessor<PsiFileSystemItem> createChildrenProcessor(final PsiScopeProcessor processor) {
    return new PsiElementProcessor<PsiFileSystemItem>() {
      public boolean execute(PsiFileSystemItem element) {
        final VirtualFile file = element.getVirtualFile();
        if (file != null && !file.isDirectory()) {
          final PsiFile psiFile = getElement().getManager().findFile(file);
          if (psiFile != null) {
            element = psiFile;
          }
        }
        return processor.execute(element, PsiSubstitutor.EMPTY);
      }
    };
  }

  /** The previous segment's reference, or null for the first segment. */
  @Nullable
  private FileReference getContextReference() {
    return myIndex > 0 ? myFileReferenceSet.getReference(myIndex - 1) : null;
  }

  public PsiElement getElement() {
    return myFileReferenceSet.getElement();
  }

  /** Single-result resolve; null when the resolve is ambiguous or failed. */
  public PsiFileSystemItem resolve() {
    ResolveResult[] resolveResults = multiResolve(false);
    return resolveResults.length == 1 ? (PsiFileSystemItem)resolveResults[0].getElement() : null;
  }

  public boolean isReferenceTo(PsiElement element) {
    if (!(element instanceof PsiFileSystemItem)) return false;
    final PsiFileSystemItem item = resolve();
    return item != null && FileReferenceHelperRegistrar.areElementsEquivalent(item, (PsiFileSystemItem)element);
  }

  public TextRange getRangeInElement() {
    return myRange;
  }

  public String getCanonicalText() {
    return myText;
  }

  protected String getText() {
    return myText;
  }

  public boolean isSoft() {
    return myFileReferenceSet.isSoft();
  }

  /**
   * Rename this segment in the host element and shift the text ranges of all
   * following segments by the resulting length delta.
   */
  public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException {
    final ElementManipulator<PsiElement> manipulator = GenericReference.getManipulator(getElement());
    if (manipulator != null) {
      myFileReferenceSet.setElement(manipulator.handleContentChange(getElement(), getRangeInElement(), newElementName));
      //Correct ranges
      int delta = newElementName.length() - myRange.getLength();
      myRange = new TextRange(getRangeInElement().getStartOffset(), getRangeInElement().getStartOffset() + newElementName.length());
      FileReference[] references = myFileReferenceSet.getAllReferences();
      for (int idx = myIndex + 1; idx < references.length; idx++) {
        references[idx].myRange = references[idx].myRange.shiftRight(delta);
      }
      return myFileReferenceSet.getElement();
    }
    throw new IncorrectOperationException("Manipulator for this element is not defined");
  }

  /**
   * Rewrite the whole path (from the set's start up to this segment's end) so
   * that it points at the given file-system item — absolute against a helper
   * root when the set is an absolute-path reference, otherwise relative to the
   * containing file.
   */
  public PsiElement bindToElement(PsiElement element) throws IncorrectOperationException {
    if (!(element instanceof PsiFileSystemItem)) throw new IncorrectOperationException("Cannot bind to element, should be instanceof PsiFileSystemItem: " + element);

    final PsiFileSystemItem fileSystemItem = (PsiFileSystemItem)element;
    VirtualFile dstVFile = fileSystemItem.getVirtualFile();
    if (dstVFile == null) throw new IncorrectOperationException("Cannot bind to non-physical element:" + element);

    final PsiFile file = getElement().getContainingFile();
    final VirtualFile curVFile = file.getVirtualFile();
    if (curVFile == null) throw new IncorrectOperationException("Cannot bind from non-physical element:" + file);

    final Project project = element.getProject();

    final String newName;
    if (myFileReferenceSet.isAbsolutePathReference()) {
      // absolute reference: find the first helper that knows a root for the destination
      PsiFileSystemItem root = null;
      PsiFileSystemItem dstItem = null;
      for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) {
        PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile);
        if (_dstItem != null) {
          PsiFileSystemItem _root = helper.findRoot(project, dstVFile);
          if (_root != null) {
            root = _root;
            dstItem = _dstItem;
            break;
          }
        }
      }
      if (root == null) return element;
      newName = "/" + PsiFileSystemItemUtil.getNotNullRelativePath(root, dstItem);
    }
    else {
      // relative reference: resolve both ends through the first helper that handles them
      PsiFileSystemItem curItem = null;
      PsiFileSystemItem dstItem = null;
      for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) {
        PsiFileSystemItem _curItem = helper.getPsiFileSystemItem(project, curVFile);
        if (_curItem != null) {
          PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile);
          if (_dstItem != null) {
            curItem = _curItem;
            dstItem = _dstItem;
            break;
          }
        }
      }
      checkNotNull(curItem, curVFile, dstVFile);
      newName = PsiFileSystemItemUtil.getNotNullRelativePath(curItem, dstItem);
    }

    final TextRange range = new TextRange(myFileReferenceSet.getStartInElement(), getRangeInElement().getEndOffset());
    final ElementManipulator<PsiElement> manipulator = GenericReference.getManipulator(getElement());
    if (manipulator == null) {
      throw new IncorrectOperationException("Manipulator not defined for: " + getElement());
    }
    return manipulator.handleContentChange(getElement(), range, newName);
  }

  /** Fail with a descriptive message when no relative path exists between the files. */
  private static void checkNotNull(final Object o, final VirtualFile curVFile, final VirtualFile dstVFile) throws IncorrectOperationException {
    if (o == null) {
      throw new IncorrectOperationException("Cannot find path between files; src = " + curVFile.getPresentableUrl() + "; dst = " + dstVFile.getPresentableUrl());
    }
  }

  public void registerQuickfix(HighlightInfo info, FileReference reference) {
    for (final FileReferenceHelper helper : getHelpers()) {
      helper.registerFixes(info, reference);
    }
  }

  protected List<FileReferenceHelper> getHelpers() {
    return FileReferenceHelperRegistrar.getHelpers();
  }

  public int getIndex() {
    return myIndex;
  }

  /** Builds the "cannot resolve …" message pattern shown for an unresolved segment. */
  public String getUnresolvedMessagePattern() {
    final StringBuffer builder = new StringBuffer(JavaErrorMessages.message("error.cannot.resolve"));
    builder.append(" ").append(myFileReferenceSet.getTypeName());
    if (!isLast()) {
      for (final FileReferenceHelper helper : getHelpers()) {
        builder.append(" ").append(JavaErrorMessages.message("error.cannot.resolve.infix")).append(" ")
          .append(helper.getDirectoryTypeName());
      }
    }
    builder.append(" ''{0}''.");
    return builder.toString();
  }

  /** Whether this is the final segment of the path. */
  public final boolean isLast() {
    return myIndex == myFileReferenceSet.getAllReferences().length - 1;
  }

  @NotNull
  public FileReferenceSet getFileReferenceSet() {
    return myFileReferenceSet;
  }

  public void clearResolveCaches() {
    myCachedResult = null;
  }

  public LocalQuickFix[] getQuickFixes() {
    final List<LocalQuickFix> result = new ArrayList<LocalQuickFix>();
    for (final FileReferenceHelper<?> helper : getHelpers()) {
      result.addAll(helper.registerFixes(null, this));
    }
    return result.toArray(new LocalQuickFix[result.size()]);
  }
}
// checkstyle: Checks Java source code for adherence to a set of rules. // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // This library is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // You should have received a copy of the GNU Lesser General Public // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.puppycrawl.tools.checkstyle.checks.indentation; import com.puppycrawl.tools.checkstyle.api.DetailAST; import com.puppycrawl.tools.checkstyle.api.TokenTypes; /** * Handler for method definitions. * * @author jrichard, Maikel Steneker */ public class MethodDefHandler extends BlockParentHandler { /** * Construct an instance of this handler with the given indentation check, * abstract syntax tree, and parent handler. * * @param aIndentCheck the indentation check * @param aAst the abstract syntax tree * @param aParent the parent handler */ public MethodDefHandler(IndentationCheck aIndentCheck, DetailAST aAst, ExpressionHandler aParent) { super(aIndentCheck, (aAst.getType() == TokenTypes.CTOR_DEF) ? "ctor def" : "method def", aAst, aParent); } @Override protected DetailAST getToplevelAST() { // we check this stuff ourselves below return null; } /** * Check the indentation of the method name. */ private void checkIdent() { final DetailAST ident = getMainAst().findFirstToken(TokenTypes.IDENT); final int columnNo = expandedTabsColumnNo(ident); if (startsLine(ident) && !getLevel().accept(columnNo)) { logError(ident, "", columnNo); } } /** * Check the indentation of the throws clause. 
*/ private void checkThrows() { final DetailAST throwsAst = getMainAst().findFirstToken(TokenTypes.LITERAL_THROWS); if (throwsAst == null) { return; } final int columnNo = expandedTabsColumnNo(throwsAst); final IndentLevel expectedColumnNo = new IndentLevel(getLevel(), getBasicOffset()); expectedColumnNo.addAcceptedIndent( new IndentLevel(getLevel(), 2 * getBasicOffset())); if (startsLine(throwsAst) && !expectedColumnNo.accept(columnNo)) { logError(throwsAst, "throws", columnNo, expectedColumnNo); } } /** * Check the indentation of the method type. */ private void checkType() { final DetailAST type = getMainAst().findFirstToken(TokenTypes.TYPE); final DetailAST ident = ExpressionHandler.getFirstToken(type); final int columnNo = expandedTabsColumnNo(ident); if (startsLine(ident) && !getLevel().accept(columnNo)) { logError(ident, "return type", columnNo); } } /** * Check the indentation of the method parameters. */ private void checkParameters() { final DetailAST params = getMainAst().findFirstToken(TokenTypes.PARAMETERS); checkExpressionSubtree(params, getLevel(), false, false); } @Override public void checkIndentation() { checkModifiers(); checkIdent(); checkThrows(); if (getMainAst().getType() != TokenTypes.CTOR_DEF) { checkType(); } checkParameters(); if (getLCurly() == null) { // asbtract method def -- no body return; } super.checkIndentation(); } }
package com.tobszarny.intellij.plugin.activetabhighlighter;

import java.awt.*;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.WeakHashMap;

import org.jetbrains.annotations.NotNull;

import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManagerEvent;
import com.intellij.openapi.fileEditor.FileEditorManagerListener;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.Gray;
import com.intellij.ui.tabs.JBTabsPosition;
import com.intellij.ui.tabs.impl.DefaultEditorTabsPainter;
import com.intellij.ui.tabs.impl.JBEditorTabs;
import com.intellij.ui.tabs.impl.JBEditorTabsPainter;
import com.intellij.ui.tabs.impl.ShapeTransform;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.xmlb.annotations.Transient;
import net.sf.cglib.proxy.Enhancer;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;

/**
 * Application component that replaces the IDE's editor-tab painters (via
 * reflection and a cglib proxy) so the active tab can be highlighted with
 * user-configured colors.
 */
@SuppressWarnings("UseJBColor")
public final class TabsPainterPatcherComponent implements ApplicationComponent {

    private static final Logger LOG = Logger.getInstance(TabsPainterPatcherComponent.class);

    // Current color configuration; replaced wholesale by onColorsChanged().
    private static Config config = new Config();

    // Weak set of all live patched painters (values unused) so color changes
    // can be re-applied without keeping the painters alive.
    private WeakHashMap<TabsPainter, String> map = new WeakHashMap<>();

    public TabsPainterPatcherComponent() {
    }

    public static TabsPainterPatcherComponent getInstance() {
        return ServiceManager.getService(TabsPainterPatcherComponent.class);
    }

    /** Re-applies the new configuration to every live patched painter. */
    public static void onColorsChanged(@NotNull Config config) {
        TabsPainterPatcherComponent.config = config;
        TabsPainterPatcherComponent instance = getInstance();
        for (TabsPainter editorGroupsTabsPainter : instance.map.keySet()) {
            setColors(editorGroupsTabsPainter);
        }
    }

    /** Pushes darcula or classic mask/active colors into the painter and repaints its tabs. */
    @SuppressWarnings("UseJBColor")
    private static void setColors(TabsPainter editorGroupsTabsPainter) {
        if (editorGroupsTabsPainter != null) {
            if (editorGroupsTabsPainter instanceof DarculaTabsPainter) {
                editorGroupsTabsPainter.setMask(new Color(config.getDarcula_mask()), config.getDarcula_opacity());
                editorGroupsTabsPainter.setActiveColor(new Color(config.getDarcula_activeColor()));
            } else {
                editorGroupsTabsPainter.setMask(new Color(config.getClassic_mask()), config.getClassic_opacity());
                editorGroupsTabsPainter.setActiveColor(new Color(config.getClassic_activeColor()));
            }

            JBEditorTabs painterTabs = editorGroupsTabsPainter.getTabs();
            if (!painterTabs.isDisposed()) {
                painterTabs.repaint();
            }
        }
    }

    /**
     * Subscribes to editor selection changes; on each change, walks up the
     * selected editor's component hierarchy to find its JBEditorTabs and
     * patches that instance's painters.
     */
    @Override
    public void initComponent() {
        final MessageBus bus = ApplicationManagerEx.getApplicationEx().getMessageBus();
        final MessageBusConnection connect = bus.connect();
        connect.subscribe(FileEditorManagerListener.FILE_EDITOR_MANAGER, new FileEditorManagerListener() {
            @Override
            public void selectionChanged(@NotNull final FileEditorManagerEvent event) {
                if (!config.isEnabled()) {
                    return;
                }
                final FileEditor editor = event.getNewEditor();
                if (editor != null) {
                    Component component = editor.getComponent();
                    while (component != null) {
                        if (component instanceof JBEditorTabs) {
                            patchPainter((JBEditorTabs) component);
                            return;
                        }
                        component = component.getParent();
                    }
                }
            }
        });
    }

    /**
     * Replaces the tabs component's private 'myDefaultPainter'/'myDarkPainter'
     * fields with proxied custom painters (reflection hack; skipped when
     * already patched).
     */
    private void patchPainter(final JBEditorTabs component) {
        if (alreadyPatched(component)) {
            return;
        }

        final TabsPainter tabsPainter = new TabsPainter(component);
        init(tabsPainter);
        final TabsPainter darculaTabsPainter = new DarculaTabsPainter(component);
        init(darculaTabsPainter);

        LOG.info("HACK: Overriding JBEditorTabsPainters");
        ReflectionUtil.setField(JBEditorTabs.class, component, JBEditorTabsPainter.class, "myDefaultPainter", proxy(tabsPainter));
        ReflectionUtil.setField(JBEditorTabs.class, component, JBEditorTabsPainter.class, "myDarkPainter", proxy(darculaTabsPainter));
    }

    /**
     * Wraps the painter in a cglib proxy that delegates every call to it and,
     * after paintSelectionAndBorder, applies the custom highlight. The first
     * failure sets 'broken' and permanently disables the extra painting.
     * The ShapeInfo fields are private in the IDE class and are read reflectively.
     */
    private JBEditorTabsPainter proxy(TabsPainter tabsPainter) {
        Field fillPathField = null;
        Field pathField = null;
        Field labelPathField = null;
        try {
            final Class<?> clazz = Class.forName("com.intellij.ui.tabs.impl.JBTabsImpl$ShapeInfo");
            fillPathField = clazz.getField("fillPath");
            pathField = clazz.getField("path");
            labelPathField = clazz.getField("labelPath");
        } catch (Exception e) {
            LOG.error(e);
        }
        Field finalFillPathField = fillPathField;
        Field finalPathField = pathField;
        Field finalLabelPathField = labelPathField;

        return (TabsPainter) Enhancer.create(TabsPainter.class, new MethodInterceptor() {
            boolean broken = false;

            @Override
            public Object intercept(Object o, Method method, Object[] objects, MethodProxy methodProxy) throws Throwable {
                final Object result = method.invoke(tabsPainter, objects);

                try {
                    if (!broken) {
                        if ("paintSelectionAndBorder".equals(method.getName())) {
                            TabsPainterPatcherComponent.this.paintSelectionAndBorder(objects, tabsPainter, finalFillPathField, finalPathField, finalLabelPathField);
                        }
                    }
                } catch (Exception e) {
                    LOG.error(e);
                    // disable the hack after the first failure instead of spamming errors
                    broken = true;
                }

                return result;
            }
        });
    }

    /** kinda like the original */
    private void paintSelectionAndBorder(Object[] objects, TabsPainter tabsPainter, Field fillPathField, Field pathField, Field labelPathField)
            throws IllegalAccessException {
        // Retrieve arguments (positional args of the intercepted paintSelectionAndBorder call)
        final Graphics2D g2d = (Graphics2D) objects[0];
        final Rectangle rect = (Rectangle) objects[1];
        final Object selectedShape = objects[2];
        final Insets insets = (Insets) objects[3];
        final Color tabColor = (Color) objects[4];

        JBEditorTabs myTabs = tabsPainter.getTabs();
        final JBTabsPosition position = myTabs.getTabsPosition();
        // NOTE(review): 'horizontalTabs', '_x', '_y', '_height' and 'tabColor'
        // are currently unused — kept as-is (leftovers from the copied original?).
        final boolean horizontalTabs = myTabs.isHorizontalTabs();
        int _x = rect.x;
        int _y = rect.y;
        int _height = rect.height;
        boolean hasFocus = JBEditorTabsPainter.hasFocus(myTabs);

        ShapeTransform shapeTransform = null;
        if (fillPathField != null) {
            shapeTransform = (ShapeTransform) fillPathField.get(selectedShape);
        }

        if (myTabs.hasUnderlineSelection() /* && myTabs.getTabCount() > 1 */) {
            // darcula: fill the active tab and draw an underline on the tab edge
            if (shapeTransform != null && hasFocus) {
                fillSelectionAndBorder(g2d, tabsPainter, hasFocus, shapeTransform);
            }

            Color underline = new Color(config.getUnderlineColor());
            Color underlineColor_inactive = new Color(config.getUnderlineColor_inactive());
            int underlineOpacity_inactive = config.getUnderlineOpacity_inactive();
            Color inactiveUnderline = ColorUtil.withAlpha(underlineColor_inactive, underlineOpacity_inactive / 100.0);

            g2d.setColor(hasFocus ? underline : inactiveUnderline);
            int thickness = 3;
            // underline placement depends on where the tab strip is docked
            if (position == JBTabsPosition.bottom) {
                g2d.fillRect(rect.x, rect.y - 1, rect.width, thickness);
            } else if (position == JBTabsPosition.top) {
                g2d.fillRect(rect.x, rect.y + rect.height - thickness + 1, rect.width, thickness);
                g2d.setColor(UIUtil.CONTRAST_BORDER_COLOR);
                g2d.drawLine(Math.max(0, rect.x - 1), rect.y, rect.x + rect.width, rect.y);
            } else if (position == JBTabsPosition.left) {
                g2d.fillRect(rect.x + rect.width - thickness + 1, rect.y, thickness, rect.height);
            } else if (position == JBTabsPosition.right) {
                g2d.fillRect(rect.x, rect.y, thickness, rect.height);
            }
        } else {
            // classic skin: fill the active tab and draw a subtle label separator
            if (pathField == null || labelPathField == null) {
                return;
            }
            final ShapeTransform path = (ShapeTransform) pathField.get(selectedShape);
            final ShapeTransform labelPath = (ShapeTransform) labelPathField.get(selectedShape);
            Insets i = path.transformInsets(insets);

            if (shapeTransform != null && hasFocus) {
                fillSelectionAndBorder(g2d, tabsPainter, hasFocus, shapeTransform);
            }
            g2d.setColor(Gray._0.withAlpha(15));
            g2d.draw(labelPath.transformLine(i.left, labelPath.getMaxY(), path.getMaxX(), labelPath.getMaxY()));
        }
    }

    /** Fills the selected tab's shape with the active or inactive-mask color. */
    private void fillSelectionAndBorder(Graphics2D g2d, TabsPainter tabsPainter, boolean hasFocus, ShapeTransform fillPath)
            throws IllegalAccessException {
        g2d.setColor(hasFocus ? tabsPainter.getActiveColor() : tabsPainter.getInactiveMaskColor());
        g2d.fill(fillPath.getShape());
    }

    /**
     * True when the relevant painter field already holds our TabsPainter;
     * logs a warning when another plugin has patched the field first.
     */
    private boolean alreadyPatched(JBEditorTabs component) {
        if (UIUtil.isUnderDarcula()) {
            JBEditorTabsPainter painter = ReflectionUtil.getField(JBEditorTabs.class, component, JBEditorTabsPainter.class, "myDarkPainter");
            if (painter instanceof TabsPainter) {
                return true;
            }
            if (!painter.getClass().getPackage().getName().startsWith("com.intellij")) {
                // some other plugin
                LOG.warn("JBEditorTabsPainter already patched by " + painter.getClass().getCanonicalName());
            }
        } else {
            JBEditorTabsPainter painter = ReflectionUtil.getField(JBEditorTabs.class, component, JBEditorTabsPainter.class, "myDefaultPainter");
            if (painter instanceof TabsPainter) {
                return true;
            }
            if (!painter.getClass().getPackage().getName().startsWith("com.intellij")) {
                // some other plugin
                LOG.warn("JBEditorTabsPainter already patched by " + painter.getClass().getCanonicalName());
            }
        }
        return false;
    }

    /** Applies the configured colors and registers the painter for future updates. */
    public void init(TabsPainter tabsPainter) {
        setColors(tabsPainter);
        map.put(tabsPainter, null);
    }

    /** Custom painter carrying the configurable active/inactive-mask colors. */
    public static class TabsPainter extends DefaultEditorTabsPainter {
        private JBEditorTabs tabs;
        protected Color inactiveMaskColor;
        protected Color activeColor;

        /** for proxy */
        public TabsPainter() {
            super(null);
        }

        /**
         * @see DefaultEditorTabsPainter#getInactiveMaskColor()
         */
        public TabsPainter(final JBEditorTabs tabs) {
            super(tabs);
            this.tabs = tabs;
            inactiveMaskColor = ColorUtil.withAlpha(Config.DEFAULT_MASK, (Config.DEFAULT_OPACITY / 100.0));
        }

        public JBEditorTabs getTabs() {
            return tabs;
        }

        // NOTE(review): delegates unchanged to super — presumably kept so the
        // cglib proxy can intercept the call; confirm before removing.
        @Override
        public Color getDefaultTabColor() {
            return super.getDefaultTabColor();
        }

        protected Color getInactiveMaskColor() {
            return inactiveMaskColor;
        }

        public void setMask(Color mask, int opacity) {
            // opacity is a 0-100 percentage; stored as an alpha-blended color
            this.inactiveMaskColor = ColorUtil.withAlpha(mask, (opacity / 100.0));
        }

        public Color getActiveColor() {
            return activeColor;
        }

        public void setActiveColor(Color activeColor) {
            this.activeColor = activeColor;
        }
    }

    /**
     * @see com.intellij.ui.tabs.impl.DarculaEditorTabsPainter
     */
    public static class DarculaTabsPainter extends TabsPainter {

        public DarculaTabsPainter(JBEditorTabs component) {
            super(component);
            inactiveMaskColor = ColorUtil.withAlpha(Config.DEFAULT_DARCULA_MASK, (Config.DEFAULT_DARCULA_OPACITY / 100.0));
        }

        /** same as @see com.intellij.ui.tabs.impl.DarculaEditorTabsPainter */
        @Override
        public Color getDefaultTabColor() {
            if (myDefaultTabColor != null) {
                return myDefaultTabColor;
            }
            return new Color(0x515658);
        }
    }

    /** Serializable color/opacity settings; opacities are 0-100 percentages. */
    static class Config {
        public static final Color DEFAULT_MASK = new Color(0x262626);
        public static final int DEFAULT_OPACITY = 20;
        public static final Color DEFAULT_DARCULA_MASK = new Color(0x262626);
        public static final int DEFAULT_DARCULA_OPACITY = 50;

        // too low
        /** disabling requires restart */
        private boolean enabled = true;
        private Integer classic_mask = DEFAULT_MASK.getRGB();
        private int classic_opacity = DEFAULT_OPACITY;
        // currently not used - delete?
        private Integer darcula_mask = DEFAULT_DARCULA_MASK.getRGB();
        private int darcula_opacity = 50;
        private Integer underlineColor = new Color(0x439EB8).getRGB();
        private Integer underlineColor_inactive = Color.BLACK.getRGB();
        private int underlineOpacity_inactive = 0;
        private int darcula_activeColor = new Color(173, 46, 156).getRGB();
        private int classic_activeColor = new Color(173, 46, 156).getRGB();

        public boolean isEnabled() {
            return enabled;
        }

        public void setEnabled(boolean enabled) {
            this.enabled = enabled;
        }

        public Integer getClassic_mask() {
            return classic_mask;
        }

        public void setClassic_mask(Integer classic_mask) {
            this.classic_mask = classic_mask;
        }

        public int getClassic_opacity() {
            return classic_opacity;
        }

        public void setClassic_opacity(int classic_opacity) {
            this.classic_opacity = classic_opacity;
        }

        public Integer getDarcula_mask() {
            return darcula_mask;
        }

        public void setDarcula_mask(Integer darcula_mask) {
            this.darcula_mask = darcula_mask;
        }

        public int getDarcula_opacity() {
            return darcula_opacity;
        }

        public void setDarcula_opacity(int darcula_opacity) {
            this.darcula_opacity = darcula_opacity;
        }

        public Integer getUnderlineColor() {
            return underlineColor;
        }

        public void setUnderlineColor(Integer underlineColor) {
            this.underlineColor = underlineColor;
        }

        public Integer getUnderlineColor_inactive() {
            return underlineColor_inactive;
        }

        public void setUnderlineColor_inactive(Integer underlineColor_inactive) {
            this.underlineColor_inactive = underlineColor_inactive;
        }

        public int getUnderlineOpacity_inactive() {
            return underlineOpacity_inactive;
        }

        public void setUnderlineOpacity_inactive(int underlineOpacity_inactive) {
            this.underlineOpacity_inactive = underlineOpacity_inactive;
        }

        /** UI-facing setter; falls back to the default on unparsable input. */
        @Transient
        public void setClassicOpacity(String text) {
            try {
                this.classic_opacity = parse(text);
            } catch (Exception e) {
                classic_opacity = DEFAULT_OPACITY;
            }
        }

        /** UI-facing setter; falls back to the default on unparsable input. */
        @Transient
        public void setDarculaOpacity(String text) {
            try {
                this.darcula_opacity = parse(text);
            } catch (Exception e) {
                darcula_opacity = DEFAULT_DARCULA_OPACITY;
            }
        }

        /** Parses an opacity percentage and clamps it to the [0, 100] range. */
        private int parse(String text) {
            int opacity = Integer.parseInt(text);
            if (opacity > 100) {
                opacity = 100;
            } else if (opacity < 0) {
                opacity = 0;
            }
            return opacity;
        }

        public int getDarcula_activeColor() {
            return darcula_activeColor;
        }

        public void setDarcula_activeColor(int darcula_activeColor) {
            this.darcula_activeColor = darcula_activeColor;
        }

        public int getClassic_activeColor() {
            return classic_activeColor;
        }

        public void setClassic_activeColor(int classic_activeColor) {
            this.classic_activeColor = classic_activeColor;
        }
    }
}
package com.censoredsoftware.Demigods.Episodes.Demo.Deity.Insignian;

import java.util.*;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.block.BlockFace;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.scheduler.BukkitRunnable;

import com.censoredsoftware.Demigods.Engine.Demigods;
import com.censoredsoftware.Demigods.Engine.Object.Ability.Ability;
import com.censoredsoftware.Demigods.Engine.Object.Ability.AbilityInfo;
import com.censoredsoftware.Demigods.Engine.Object.Ability.Devotion;
import com.censoredsoftware.Demigods.Engine.Object.Deity.Deity;
import com.censoredsoftware.Demigods.Engine.Object.Deity.DeityInfo;
import com.censoredsoftware.Demigods.Engine.Utility.MiscUtility;
import com.censoredsoftware.Demigods.Engine.Utility.StructureUtility;
import com.censoredsoftware.Demigods.Engine.Utility.UnicodeUtility;
import com.censoredsoftware.Demigods.Engine.Utility.ZoneUtility;
import com.google.common.collect.Maps;

/**
 * Demo deity "OmegaX17" of the Insignian alliance: TNT-themed abilities
 * (explosion walking, explosion immunity and an "equalizer" limiter).
 */
public class OmegaX17 extends Deity
{
	private static String name = "OmegaX17", alliance = "Insignian", permission = "demigods.insignian.omega";
	private static ChatColor color = ChatColor.BLACK;
	// Items a player must use to claim this deity.
	private static Set<Material> claimItems = new HashSet<Material>()
	{
		{
			add(Material.TNT);
		}
	};
	// Lore lines shown for this deity.
	private static List<String> lore = new ArrayList<String>()
	{
		{
			add(" ");
			add(ChatColor.AQUA + " Demigods > " + ChatColor.RESET + color + name);
			// NOTE(review): the next statement appears garbled — an unterminated
			// string literal with a second add(...) fused into it; it cannot
			// compile as-is. Kept byte-identical; recover the original from VCS.
			add(ChatColor.RESET + " add(ChatColor.YELLOW + " Claim Items:");
			for(Material item : claimItems)
			{
				add(ChatColor.GRAY + " " + UnicodeUtility.rightwardArrow() + " " + ChatColor.WHITE + item.name());
			}
			add(ChatColor.YELLOW + " Abilities:");
		}
	};
	private static Type type = Type.DEMO;
	// The three abilities granted by this deity.
	private static Set<Ability> abilities = new HashSet<Ability>()
	{
		{
			add(new SplosionWalking());
			add(new NoSplosion());
			add(new Equalizer());
		}
	};

	public OmegaX17()
	{
		super(new DeityInfo(name, alliance, permission, color, claimItems, lore, type), abilities);
	}
}

/**
 * Passive support ability: periodically (every 30 ticks) spawns primed TNT
 * at eligible OmegaX17 players standing on solid ground.
 */
class SplosionWalking extends Ability
{
	private static String deity = "OmegaX17", name = "Explosion Walking", command = null, permission = "demigods.insignian.omega";
	private static int cost = 0, delay = 0, repeat = 30;
	private static AbilityInfo info;
	private static List<String> details = new ArrayList<String>()
	{
		{
			add(ChatColor.GRAY + " " + UnicodeUtility.rightwardArrow() + " " + ChatColor.WHITE + "The end of all things.");
		}
	};
	private static Devotion.Type type = Devotion.Type.SUPPORT;

	protected SplosionWalking()
	{
		super(info = new AbilityInfo(deity, name, command, permission, cost, delay, repeat, details, type), null, new BukkitRunnable()
		{
			@Override
			public void run()
			{
				// Fire only for OmegaX17 players on solid ground, not flying,
				// outside no-PVP zones and not trespassing in protected areas.
				for(Player online : Bukkit.getOnlinePlayers())
				{
					if(Deity.canUseDeitySilent(online, "OmegaX17") && online.getLocation().getBlock().getRelative(BlockFace.DOWN).getType().isSolid() && !online.isFlying() && !ZoneUtility.zoneNoPVP(online.getLocation()) && !StructureUtility.isTrespassingInNoGriefingZone(online)) doIt(online);
				}
			}

			// Spawn a random number (4-30) of primed TNT entities at the player.
			public void doIt(Player player)
			{
				for(int i = 0; i < MiscUtility.generateIntRange(4, 30); i++)
				{
					player.getWorld().spawnEntity(player.getLocation(), EntityType.PRIMED_TNT);
				}
			}
		});
	}
}

/**
 * Passive ability: cancels block/entity explosion damage for OmegaX17 players.
 */
class NoSplosion extends Ability
{
	private static String deity = "OmegaX17", name = "No Explosion Damage", command = null, permission = "demigods.insignian.omega";
	private static int cost = 0, delay = 0, repeat = 0;
	private static List<String> details = new ArrayList<String>()
	{
		{
			add(ChatColor.GRAY + " " + UnicodeUtility.rightwardArrow() + " " + ChatColor.WHITE + "Take no damage from explosions.");
		}
	};
	private static Devotion.Type type = Devotion.Type.PASSIVE;

	protected NoSplosion()
	{
		super(new AbilityInfo(deity, name, command, permission, cost, delay, repeat, details, type), new Listener()
		{
			// NOTE(review): method name typo "onEntityDamange" — kept; Bukkit
			// dispatches by @EventHandler, so the name is not significant.
			@EventHandler(priority = EventPriority.MONITOR)
			public void onEntityDamange(EntityDamageEvent damageEvent)
			{
				if(damageEvent.getEntity() instanceof Player)
				{
					Player player = (Player) damageEvent.getEntity();
					if(!Deity.canUseDeitySilent(player, deity)) return;

					// Cancel explosion damage (block or entity) for this deity's players.
					if(damageEvent.getCause() == EntityDamageEvent.DamageCause.BLOCK_EXPLOSION || damageEvent.getCause() == EntityDamageEvent.DamageCause.ENTITY_EXPLOSION) damageEvent.setCancelled(true);
				}
			}
		}, null);
	}
}

/**
 * Passive limiter ability ("prevent Omega from being too OP"); listens to
 * chat for a pending confirmation tracked in the 'equalizing' map.
 * NOTE(review): this class is truncated at the end of this chunk — the
 * remainder of onAsyncPlayerChat continues past the visible source.
 */
class Equalizer extends Ability
{
	private static String deity = "OmegaX17", name = "Omega Equalizer", command = null, permission = "demigods.insignian.omega";
	private static int cost = 0, delay = 0, repeat = 600;
	private static AbilityInfo info;
	private static List<String> details = new ArrayList<String>()
	{
		{
			add(ChatColor.GRAY + " " + UnicodeUtility.rightwardArrow() + " " + ChatColor.WHITE + "Prevent Omega from being too OP.");
		}
	};
	private static Devotion.Type type = Devotion.Type.PASSIVE;
	// Players with a pending equalizer confirmation, mapped to the expected message.
	private static Map<Player, String> equalizing = Maps.newHashMap();

	protected Equalizer()
	{
		super(info = new AbilityInfo(deity, name, command, permission, cost, delay, repeat, details, type), new Listener()
		{
			@EventHandler(priority = EventPriority.HIGHEST)
			public void onAsyncPlayerChat(AsyncPlayerChatEvent chatEvent)
			{
				Player player = chatEvent.getPlayer();
				if(equalizing.containsKey(player) && chatEvent.getMessage().equals(equalizing.get(player)))
				{
					chatEvent.setCancelled(true);
					equalizing.remove(player);
					player.sendMessage(ChatColor.YELLOW + "Hooray! 
You may now continue being OP."); } } @EventHandler(priority = EventPriority.HIGHEST) public void onPlayerMove(PlayerMoveEvent moveEvent) { Player player = moveEvent.getPlayer(); if(equalizing.containsKey(player)) moveEvent.setCancelled(true); } }, new BukkitRunnable() { @Override public void run() { for(Player online : Bukkit.getOnlinePlayers()) { if(Deity.canUseDeitySilent(online, "OmegaX17") && !tooSlow(online) && MiscUtility.generateIntRange(0, 19) == 1) equalize(online); } } public boolean tooSlow(Player player) { if(equalizing.containsKey(player)) { player.sendMessage(ChatColor.YELLOW + "Too slow, try again:"); equalize(player); return true; } return false; } public void equalize(Player player) { player.sendMessage(ChatColor.DARK_RED + Demigods.message.chatTitle("Equalizer")); equalizing.put(player, MiscUtility.generateString(19)); player.sendMessage(ChatColor.YELLOW + " Here is your code (you have 10 seconds to say it in chat): "); player.sendMessage(ChatColor.GRAY + " " + equalizing.get(player)); } }); } }
package com.epam.ta.reportportal.database.entity.settings; import com.google.common.base.MoreObjects; import java.io.Serializable; public class ServerEmailDetails implements Serializable { private Boolean enabled; private String host; private Integer port; private String protocol; private Boolean authEnabled; private Boolean starTlsEnabled; private Boolean sslEnabled; private String username; private String password; private String from; public ServerEmailDetails() { } public Boolean getEnabled() { return enabled; } public void setEnabled(Boolean enabled) { this.enabled = enabled; } public void setPort(Integer port) { this.port = port; } public Boolean getStarTlsEnabled() { return starTlsEnabled; } public void setStarTlsEnabled(Boolean starTlsEnabled) { this.starTlsEnabled = starTlsEnabled; } public Boolean getSslEnabled() { return sslEnabled; } public void setSslEnabled(Boolean sslEnabled) { this.sslEnabled = sslEnabled; } public void setHost(String host) { this.host = host; } public String getHost() { return host; } public void setPort(int port) { this.port = port; } public int getPort() { return port; } public void setProtocol(String protocol) { this.protocol = protocol.toLowerCase(); } public String getProtocol() { return protocol; } public void setUsername(String username) { this.username = username; } public String getUsername() { return username; } public void setPassword(String pass) { this.password = pass; } public String getPassword() { return password; } public void setAuthEnabled(Boolean authEnabled) { this.authEnabled = authEnabled; } public boolean isStarTlsEnabled() { return starTlsEnabled; } public void setStarTlsEnabled(boolean starTlsEnabled) { this.starTlsEnabled = starTlsEnabled; } public Boolean getAuthEnabled() { return authEnabled; } public boolean isSslEnabled() { return sslEnabled; } public void setSslEnabled(boolean sslEnabled) { this.sslEnabled = sslEnabled; } public String getFrom() { return from; } public void setFrom(String from) { 
this.from = from; } @Override public String toString() { return MoreObjects.toStringHelper(this).add("enabled", enabled).add("host", host).add("port", port).add("protocol", protocol) .add("authEnabled", authEnabled).add("starTlsEnabled", starTlsEnabled).add("sslEnabled", sslEnabled) .add("username", username).add("password", password).add("from", from).toString(); } }
package com.mmnaseri.utils.spring.data.domain.impl;

import com.mmnaseri.utils.spring.data.domain.Modifier;
import com.mmnaseri.utils.spring.data.domain.Operator;
import com.mmnaseri.utils.spring.data.domain.Parameter;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadata;
import com.mmnaseri.utils.spring.data.query.*;
import com.mmnaseri.utils.spring.data.query.impl.*;
import com.mmnaseri.utils.spring.data.string.DocumentReader;
import com.mmnaseri.utils.spring.data.string.impl.DefaultDocumentReader;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;

import java.lang.reflect.Method;
import java.util.*;
import java.util.regex.Pattern;

/**
 * Parses a Spring-Data-style query method name (e.g. {@code findTop3DistinctByNameAndAgeGreaterThanOrderByNameAsc})
 * into a {@code QueryDescriptor}: the leading function word, an optional limit
 * ({@code First}/{@code Top}), a distinct flag, OR-separated branches of AND-ed
 * {@code Parameter} conditions, an optional {@code OrderBy} clause, and extractors for a
 * trailing {@code Pageable}/{@code Sort} argument.
 *
 * @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
 * @since 1.0 (9/17/15)
 */
public class QueryDescriptionExtractor {

    public static final String ALL_IGNORE_CASE_SUFFIX = "AllIgnoreCase";
    public static final String IGNORE_CASE_SUFFIX = "IgnoreCase";
    public static final String ASC_SUFFIX = "Asc";
    public static final String DESC_SUFFIX = "Desc";

    /**
     * Extracts a query descriptor from the given method's name.
     *
     * @param repositoryMetadata metadata for the repository (supplies the entity type whose
     *                           properties the name is resolved against)
     * @param method             the query method being parsed
     * @return the parsed descriptor
     * @throws IllegalStateException if the name is malformed (no leading function word, a
     *                               duplicate limit/distinct flag, a dangling {@code By} or
     *                               {@code And}/{@code Or}, a parameter-count/type mismatch,
     *                               a non-comparable sort property, or leftover tokens)
     */
    public QueryDescriptor extract(RepositoryMetadata repositoryMetadata, Method method) {
        String methodName = method.getName();
        //check to see if the AllIgnoreCase flag is set
        boolean allIgnoreCase = methodName.endsWith(ALL_IGNORE_CASE_SUFFIX);
        //we need to unify method name afterwards (strip the suffix before parsing)
        methodName = allIgnoreCase ? methodName.substring(0, methodName.length() - ALL_IGNORE_CASE_SUFFIX.length()) : methodName;
        //create a document reader for processing method name
        final DocumentReader reader = new DefaultDocumentReader(methodName);
        //the first word in the method name is the function name
        String function = reader.read(Pattern.compile("^[a-z]+"));
        if (function == null) {
            throw new IllegalStateException("Malformed query method name: " + method);
        }
        //if the method name is one of the following, it is a simple read, and no function is required
        if (Arrays.asList("read", "find", "query", "get", "load", "select").contains(function)) {
            function = null;
        }
        //this is the limit set on the number of items being returned (0 means "no limit")
        int limit = 0;
        //this is the flag that determines whether or not the result should be sifted for distinct values
        boolean distinct = false;
        //this is the extractor used for getting paging data
        final PageParameterExtractor pageExtractor;
        //this is the extractor used for getting sorting data
        final SortParameterExtractor sortExtractor;
        //these are decision branches; branches are OR-ed together, parameters within a branch are AND-ed
        final List<List<Parameter>> branches = new ArrayList<List<Parameter>>();
        //if the method name simply was the function name, no metadata can be extracted
        if (!reader.hasMore()) {
            pageExtractor = null;
            sortExtractor = null;
        } else {
            //scan for words prior to 'By' (First/Top limits and the Distinct flag)
            while (reader.hasMore() && !reader.has("By")) {
                //if the next word is First, then we are setting a limit (optionally followed by a count, default 1)
                if (reader.has("First")) {
                    if (limit > 0) {
                        throw new IllegalStateException("There is already a limit of " + limit + " specified for this query: " + method);
                    }
                    reader.expect("First");
                    if (reader.has("\\d+")) {
                        limit = Integer.parseInt(reader.expect("\\d+"));
                    } else {
                        limit = 1;
                    }
                    continue;
                } else if (reader.has("Top")) {
                    //'Top' always requires an explicit count
                    if (limit > 0) {
                        throw new IllegalStateException("There is already a limit of " + limit + " specified for this query: " + method);
                    }
                    reader.expect("Top");
                    limit = Integer.parseInt(reader.expect("\\d+"));
                    continue;
                } else if (reader.has("Distinct")) {
                    //if the next word is 'Distinct', we are saying we should return distinct results
                    if (distinct) {
                        throw new IllegalStateException("You have already stated that this query should return distinct items: " + method);
                    }
                    distinct = true;
                }
                //we read the words until we reach "By".
                reader.expect("[A-Z][a-z]+");
            }
            reader.expect("By");
            if (!reader.hasMore()) {
                throw new IllegalStateException("Query method name cannot end with `By`");
            }
            //index of the next method parameter to be bound to an operand
            int index = 0;
            branches.add(new LinkedList<Parameter>());
            while (reader.hasMore()) {
                //read a full expression (one property condition, up to the next And/Or or end of name)
                final Parameter parameter;
                String expression = reader.expect("(.*?)(And[A-Z]|Or[A-Z]|$)");
                if (expression.matches(".*?(And|Or)[A-Z]")) {
                    //if the expression ended in And/Or, we need to put the one extra character we scanned back
                    //we scan one extra character because we don't want anything like "Order" to be mistaken for "Or"
                    reader.backtrack(1);
                    expression = expression.substring(0, expression.length() - 1);
                }
                //if the expression ended in Or, this is the end of this branch
                boolean branchEnd = expression.endsWith("Or");
                //if the expression contains an OrderBy, it is not only the end of the branch, but also the end of the query
                boolean expressionEnd = expression.matches(".+[a-z]OrderBy[A-Z].+");
                if (expressionEnd) {
                    //if that is the case, we need to put back the entirety of the order by clause
                    int length = expression.length();
                    expression = expression.replaceFirst("^(.+[a-z])OrderBy[A-Z].+$", "$1");
                    length -= expression.length();
                    reader.backtrack(length);
                }
                final Set<Modifier> modifiers = new HashSet<Modifier>();
                if (expression.endsWith(IGNORE_CASE_SUFFIX)) {
                    //if the expression ended in IgnoreCase, we need to strip that off
                    modifiers.add(Modifier.IGNORE_CASE);
                    expression = expression.substring(0, expression.length() - IGNORE_CASE_SUFFIX.length());
                } else if (allIgnoreCase) {
                    //if we had already set "AllIgnoreCase", we will still add the modifier
                    modifiers.add(Modifier.IGNORE_CASE);
                }
                //if the expression ends in And/Or, we expect there to be more
                if (expression.matches(".*?(And|Or)$")) {
                    if (!reader.hasMore()) {
                        throw new IllegalStateException("Expected more tokens to follow AND/OR operator");
                    }
                }
                expression = expression.replaceFirst("(And|Or)$", "");
                String property = null;
                Operator operator = null;
                //let's find out the operator that covers the longest suffix of the operation
                //(i starts at 1 so the property name is never empty when a suffix matches)
                for (int i = 1; i < expression.length(); i++) {
                    operator = Operator.getBySuffix(expression.substring(i));
                    if (operator != null) {
                        property = expression.substring(0, i);
                        break;
                    }
                }
                //if no operator was found, it is the implied "IS" operator
                if (operator == null) {
                    property = expression;
                    operator = Operator.IS;
                }
                //if the property was empty, the operator name was the property name itself
                if (property.isEmpty()) {
                    property = expression;
                    operator = Operator.IS;
                }
                //let's get the property descriptor (resolves the name against the entity type)
                final PropertyDescriptor propertyDescriptor = PropertyUtils.getPropertyDescriptor(repositoryMetadata.getEntityType(), property);
                property = propertyDescriptor.getPath();
                //we need to match the method parameters with the operands for the designated operator
                final int[] indices = new int[operator.getOperands()];
                for (int i = 0; i < operator.getOperands(); i++) {
                    if (index >= method.getParameterTypes().length) {
                        throw new IllegalStateException("Expected to see parameter with index " + index);
                    }
                    if (!propertyDescriptor.getType().isAssignableFrom(method.getParameterTypes()[index])) {
                        throw new IllegalStateException("Expected parameter " + index + " on method " + methodName + " to be a descendant of " + propertyDescriptor.getType());
                    }
                    indices[i] = index ++;
                }
                //create a parameter definition for the given expression
                parameter = new ImmutableParameter(property, modifiers, indices, operator);
                //get the current branch
                final List<Parameter> currentBranch = branches.get(branches.size() - 1);
                //add this parameter to the latest branch
                currentBranch.add(parameter);
                //if the branch has ended with "OR", we set up a new branch
                if (branchEnd) {
                    branches.add(new LinkedList<Parameter>());
                }
                //if this is the end of expression, so we need to jump out
                if (expressionEnd) {
                    break;
                }
            }
            final com.mmnaseri.utils.spring.data.query.Sort sort;
            //let's figure out if there is a sort requirement embedded in the query definition
            if (reader.read("OrderBy") != null) {
                final List<Order> orders = new ArrayList<Order>();
                while (reader.hasMore()) {
                    //each order item is a property name followed by Asc or Desc
                    String expression = reader.expect(".*?(Asc|Desc)");
                    final SortDirection direction;
                    if (expression.endsWith(ASC_SUFFIX)) {
                        direction = SortDirection.ASCENDING;
                        expression = expression.substring(0, expression.length() - ASC_SUFFIX.length());
                    } else {
                        direction = SortDirection.DESCENDING;
                        expression = expression.substring(0, expression.length() - DESC_SUFFIX.length());
                    }
                    final PropertyDescriptor propertyDescriptor = PropertyUtils.getPropertyDescriptor(repositoryMetadata.getEntityType(), expression);
                    if (!Comparable.class.isAssignableFrom(propertyDescriptor.getType())) {
                        throw new IllegalStateException("Sort property " + propertyDescriptor.getPath() + " is not comparable " + method);
                    }
                    final Order order = new ImmutableOrder(direction, propertyDescriptor.getPath(), NullHandling.DEFAULT);
                    orders.add(order);
                }
                sort = new ImmutableSort(orders);
            } else {
                sort = null;
            }
            if (reader.hasMore()) {
                throw new IllegalStateException("Too many tokens in the query name: " + method);
            }
            //bind any trailing Pageable/Sort argument; `index` is the count of parameters consumed by operands
            if (method.getParameterTypes().length == index) {
                pageExtractor = null;
                sortExtractor = sort == null ? null : new WrappedSortParameterExtractor(sort);
            } else if (method.getParameterTypes().length == index + 1) {
                if (Pageable.class.isAssignableFrom(method.getParameterTypes()[index])) {
                    pageExtractor = new PageablePageParameterExtractor(index);
                    sortExtractor = sort == null ? new PageableSortParameterExtractor(index) : new WrappedSortParameterExtractor(sort);
                } else if (Sort.class.isAssignableFrom(method.getParameterTypes()[index])) {
                    if (sort != null) {
                        throw new IllegalStateException("You cannot specify both an order-by clause and a dynamic ordering");
                    }
                    pageExtractor = null;
                    sortExtractor = new DirectSortParameterExtractor(index);
                } else {
                    throw new IllegalStateException("Invalid last argument: expected paging or sorting " + method);
                }
            } else {
                throw new IllegalStateException("Too many parameters declared for query method " + method);
            }
        }
        return new DefaultQueryDescriptor(distinct, function, limit, pageExtractor, sortExtractor, branches, repositoryMetadata);
    }

}
package fi.otavanopisto.kuntaapi.server.integrations.ptv;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Logger;

import javax.inject.Inject;

import fi.otavanopisto.kuntaapi.server.id.ElectronicServiceChannelId;
import fi.otavanopisto.kuntaapi.server.id.IdController;
import fi.otavanopisto.kuntaapi.server.id.OrganizationId;
import fi.otavanopisto.kuntaapi.server.id.PhoneChannelId;
import fi.otavanopisto.kuntaapi.server.id.PrintableFormChannelId;
import fi.otavanopisto.kuntaapi.server.id.ServiceId;
import fi.otavanopisto.kuntaapi.server.id.ServiceLocationChannelId;
import fi.otavanopisto.kuntaapi.server.id.WebPageChannelId;
import fi.otavanopisto.kuntaapi.server.integrations.KuntaApiConsts;
import fi.otavanopisto.kuntaapi.server.rest.model.Address;
import fi.otavanopisto.kuntaapi.server.rest.model.ElectronicChannel;
import fi.otavanopisto.kuntaapi.server.rest.model.LocalizedValue;
import fi.otavanopisto.kuntaapi.server.rest.model.OntologyItem;
import fi.otavanopisto.kuntaapi.server.rest.model.Organization;
import fi.otavanopisto.kuntaapi.server.rest.model.PhoneChannel;
import fi.otavanopisto.kuntaapi.server.rest.model.PrintableFormChannel;
import fi.otavanopisto.kuntaapi.server.rest.model.Service;
import fi.otavanopisto.kuntaapi.server.rest.model.ServiceChannelAttachment;
import fi.otavanopisto.kuntaapi.server.rest.model.ServiceHour;
import fi.otavanopisto.kuntaapi.server.rest.model.ServiceLocationChannel;
import fi.otavanopisto.kuntaapi.server.rest.model.SupportContact;
import fi.otavanopisto.kuntaapi.server.rest.model.WebPage;
import fi.otavanopisto.kuntaapi.server.rest.model.WebPageChannel;
import fi.otavanopisto.restfulptv.client.model.FintoItem;
import fi.otavanopisto.restfulptv.client.model.LanguageItem;
import fi.otavanopisto.restfulptv.client.model.LocalizedListItem;
import fi.otavanopisto.restfulptv.client.model.Support;

/**
 * Base class for PTV (Palvelutietovaranto) providers. Translates restful-ptv client
 * model objects into Kunta API REST model objects, mapping PTV ids to Kunta API ids
 * via the injected {@link IdController}. Translation methods return {@code null}
 * (single items) or an empty list (collections) when input is missing or an id
 * cannot be translated; failed id translations are logged at SEVERE.
 */
public abstract class AbstractPtvProvider {

  // Value stored into OntologyItem.system for items originating from the FINTO ontology service.
  private static final String ONTOLOGY_SYSTEM_FINTO = "FINTO";

  @Inject
  private Logger logger;

  @Inject
  private IdController idController;

  // Copies language/value/type triples from PTV localized items; empty list for null/empty input.
  protected List<LocalizedValue> translateLocalizedItems(List<LocalizedListItem> items) {
    if (items != null && !items.isEmpty()) {
      List<LocalizedValue> result = new ArrayList<>();
      for (LocalizedListItem item : items) {
        LocalizedValue localizedValue = new LocalizedValue();
        localizedValue.setLanguage(item.getLanguage());
        localizedValue.setValue(item.getValue());
        localizedValue.setType(item.getType());
        result.add(localizedValue);
      }
      return result;
    }
    return Collections.emptyList();
  }

  // Like translateLocalizedItems, but for LanguageItem (no `type` attribute to copy).
  protected List<LocalizedValue> translateLanguageItems(List<LanguageItem> items) {
    if (items != null && !items.isEmpty()) {
      List<LocalizedValue> result = new ArrayList<>();
      for (LanguageItem item : items) {
        LocalizedValue localizedValue = new LocalizedValue();
        localizedValue.setLanguage(item.getLanguage());
        localizedValue.setValue(item.getValue());
        result.add(localizedValue);
      }
      return result;
    }
    return Collections.emptyList();
  }

  // Translates a PTV organization; null when input is null or its id cannot be mapped.
  // NOTE(review): parameter name "ptvOrganiztion" is a typo kept here verbatim.
  protected Organization translateOrganization(fi.otavanopisto.restfulptv.client.model.Organization ptvOrganiztion) {
    if (ptvOrganiztion == null) {
      return null;
    }

    // translateOrganizationId is defined later in this file (outside this excerpt).
    OrganizationId kuntaApiId = translateOrganizationId(ptvOrganiztion.getId());
    if (kuntaApiId == null) {
      return null;
    }

    Organization organization = new Organization();
    organization.setId(kuntaApiId.getId());
    organization.setBusinessCode(ptvOrganiztion.getBusinessCode());
    organization.setBusinessName(ptvOrganiztion.getBusinessName());
    return organization;
  }

  // Translates a list of PTV services, silently dropping ones that fail to translate.
  protected List<Service> translateServices(List<fi.otavanopisto.restfulptv.client.model.Service> ptvServices) {
    if (ptvServices == null) {
      return Collections.emptyList();
    }

    List<Service> result = new ArrayList<>(ptvServices.size());
    // NOTE(review): loop variable is misnamed "ptvElectronicChannel" (copy-paste); it is a Service.
    for (fi.otavanopisto.restfulptv.client.model.Service ptvElectronicChannel : ptvServices) {
      Service service = translateService(ptvElectronicChannel);
      if (service != null) {
        result.add(service);
      }
    }

    return result;
  }

  // Translates a single PTV service into the Kunta API model; null on null input or id failure.
  protected Service translateService(fi.otavanopisto.restfulptv.client.model.Service ptvService) {
    if (ptvService == null) {
      return null;
    }

    // NOTE(review): "IDENTIFIFER_NAME" is a typo in the PtvConsts constant name (defined elsewhere).
    ServiceId ptvId = new ServiceId(PtvConsts.IDENTIFIFER_NAME, ptvService.getId());
    ServiceId kuntaApiId = idController.translateServiceId(ptvId, KuntaApiConsts.IDENTIFIER_NAME);
    if (kuntaApiId == null) {
      logger.severe(String.format("Could not translate service %s into Kunta API id", ptvId.getId()));
      return null;
    }

    Service result = new Service();
    result.setAdditionalInformations(translateLocalizedItems(ptvService.getAdditionalInformations()));
    result.setChargeType(ptvService.getChargeType());
    result.setCoverageType(ptvService.getCoverageType());
    result.setDescriptions(translateLocalizedItems(ptvService.getDescriptions()));
    result.setId(kuntaApiId.getId());
    result.setIndustrialClasses(translateFintoItems(ptvService.getIndustrialClasses()));
    result.setKeywords(ptvService.getKeywords());
    result.setLanguages(ptvService.getLanguages());
    result.setLifeEvents(translateFintoItems(ptvService.getLifeEvents()));
    result.setMunicipalities(ptvService.getMunicipalities());
    result.setNames(translateLocalizedItems(ptvService.getNames()));
    result.setOntologyTerms(translateFintoItems(ptvService.getOntologyTerms()));
    result.setPublishingStatus(ptvService.getPublishingStatus());
    result.setRequirements(translateLanguageItems(ptvService.getRequirements()));
    result.setServiceClasses(translateFintoItems(ptvService.getServiceClasses()));
    result.setStatutoryDescriptionId(ptvService.getStatutoryDescriptionId());
    result.setTargetGroups(translateFintoItems(ptvService.getTargetGroups()));
    result.setType(ptvService.getType());
    result.setWebPages(translateWebPages(ptvService.getWebPages()));
    return result;
  }

  // Translates a FINTO ontology item, stamping system = ONTOLOGY_SYSTEM_FINTO.
  protected OntologyItem translateFintoItem(FintoItem ptvFintoItem) {
    if (ptvFintoItem == null) {
      return null;
    }

    OntologyItem result = new OntologyItem();
    result.setCode(ptvFintoItem.getCode());
    result.setId(ptvFintoItem.getId());
    result.setName(ptvFintoItem.getName());
    result.setOntologyType(ptvFintoItem.getOntologyType());
    result.setParentId(ptvFintoItem.getParentId());
    result.setParentUri(ptvFintoItem.getParentUri());
    result.setSystem(ONTOLOGY_SYSTEM_FINTO);
    result.setUri(ptvFintoItem.getUri());
    return result;
  }

  // Translates a list of FINTO items, dropping nulls.
  protected List<OntologyItem> translateFintoItems(List<FintoItem> ptvFintoItems) {
    if (ptvFintoItems == null) {
      return Collections.emptyList();
    }

    List<OntologyItem> result = new ArrayList<>(ptvFintoItems.size());
    for (FintoItem fintoItem : ptvFintoItems) {
      OntologyItem ontologyItem = translateFintoItem(fintoItem);
      if (ontologyItem != null) {
        result.add(ontologyItem);
      }
    }

    return result;
  }

  // Translates a list of electronic service channels, dropping failed translations.
  protected List<ElectronicChannel> translateElectronicChannels(List<fi.otavanopisto.restfulptv.client.model.ElectronicChannel> ptvElectronicChannels) {
    if (ptvElectronicChannels == null) {
      return Collections.emptyList();
    }

    List<ElectronicChannel> result = new ArrayList<>();
    for (fi.otavanopisto.restfulptv.client.model.ElectronicChannel ptvElectronicChannel : ptvElectronicChannels) {
      ElectronicChannel electronicChannel = translateElectronicChannel(ptvElectronicChannel);
      if (electronicChannel != null) {
        result.add(electronicChannel);
      }
    }

    return result;
  }

  // Translates a list of phone service channels, dropping failed translations.
  protected List<PhoneChannel> translatePhoneChannels(List<fi.otavanopisto.restfulptv.client.model.PhoneChannel> ptvPhoneChannels) {
    if (ptvPhoneChannels == null) {
      return Collections.emptyList();
    }

    List<PhoneChannel> result = new ArrayList<>();
    for (fi.otavanopisto.restfulptv.client.model.PhoneChannel ptvPhoneChannel : ptvPhoneChannels) {
      PhoneChannel phoneChannel = translatePhoneChannel(ptvPhoneChannel);
      if (phoneChannel != null) {
        result.add(phoneChannel);
      }
    }

    return result;
  }

  // Translates a list of printable-form channels, dropping failed translations.
  protected List<PrintableFormChannel> translatePrintableFormChannels(List<fi.otavanopisto.restfulptv.client.model.PrintableFormChannel> ptvPrintableFormChannels) {
    if (ptvPrintableFormChannels == null) {
      return Collections.emptyList();
    }

    List<PrintableFormChannel> result = new ArrayList<>();
    for (fi.otavanopisto.restfulptv.client.model.PrintableFormChannel ptvPrintableFormChannel : ptvPrintableFormChannels) {
      PrintableFormChannel printableFormChannel = translatePrintableFormChannel(ptvPrintableFormChannel);
      if (printableFormChannel != null) {
        result.add(printableFormChannel);
      }
    }

    return result;
  }

  // Translates a list of service-location channels, dropping failed translations.
  protected List<ServiceLocationChannel> translateServiceLocationChannels(List<fi.otavanopisto.restfulptv.client.model.ServiceLocationChannel> ptvServiceLocationChannels) {
    if (ptvServiceLocationChannels == null) {
      return Collections.emptyList();
    }

    List<ServiceLocationChannel> result = new ArrayList<>();
    for (fi.otavanopisto.restfulptv.client.model.ServiceLocationChannel ptvServiceLocationChannel : ptvServiceLocationChannels) {
      ServiceLocationChannel serviceLocationChannel = translateServiceLocationChannel(ptvServiceLocationChannel);
      if (serviceLocationChannel != null) {
        result.add(serviceLocationChannel);
      }
    }

    return result;
  }

  // Translates a list of web-page channels, dropping failed translations.
  protected List<WebPageChannel> translateWebPageChannels(List<fi.otavanopisto.restfulptv.client.model.WebPageChannel> ptvWebPageChannels) {
    if (ptvWebPageChannels == null) {
      return Collections.emptyList();
    }

    List<WebPageChannel> result = new ArrayList<>();
    for (fi.otavanopisto.restfulptv.client.model.WebPageChannel ptvWebPageChannel : ptvWebPageChannels) {
      WebPageChannel webPageChannel = translateWebPageChannel(ptvWebPageChannel);
      if (webPageChannel != null) {
        result.add(webPageChannel);
      }
    }

    return result;
  }

  // Translates one electronic channel; null on null input or when either the organization
  // or the channel id cannot be mapped to a Kunta API id.
  protected ElectronicChannel translateElectronicChannel(fi.otavanopisto.restfulptv.client.model.ElectronicChannel ptvElectronicChannel) {
    if (ptvElectronicChannel == null) {
      return null;
    }

    OrganizationId organizationKuntaApiId = translateOrganizationId(ptvElectronicChannel.getOrganizationId());
    if (organizationKuntaApiId == null) {
      return null;
    }

    ElectronicServiceChannelId channelPtvId = new ElectronicServiceChannelId(PtvConsts.IDENTIFIFER_NAME, ptvElectronicChannel.getId());
    ElectronicServiceChannelId channelKuntaApiId = idController.translateElectronicServiceChannelId(channelPtvId, KuntaApiConsts.IDENTIFIER_NAME);
    if (channelKuntaApiId == null) {
      logger.severe(String.format("Could not translate electronic channel id %s into Kunta API id", channelPtvId.getId()));
      return null;
    }

    ElectronicChannel result = new ElectronicChannel();
    result.setAttachments(translateAttachments(ptvElectronicChannel.getAttachments()));
    result.setDescriptions(translateLocalizedItems(ptvElectronicChannel.getDescriptions()));
    result.setId(channelKuntaApiId.getId());
    result.setLanguages(ptvElectronicChannel.getLanguages());
    result.setNames(translateLocalizedItems(ptvElectronicChannel.getNames()));
    result.setOrganizationId(organizationKuntaApiId.getId());
    result.setPublishingStatus(ptvElectronicChannel.getPublishingStatus());
    result.setRequiresAuthentication(ptvElectronicChannel.getRequiresAuthentication());
    result.setRequiresSignature(ptvElectronicChannel.getRequiresSignature());
    result.setServiceHours(translateServiceHours(ptvElectronicChannel.getServiceHours()));
    result.setSignatureQuantity(ptvElectronicChannel.getSignatureQuantity());
    result.setSupportContacts(translateSupportContacts(ptvElectronicChannel.getSupportContacts()));
    result.setType(ptvElectronicChannel.getType());
    result.setUrls(translateLanguageItems(ptvElectronicChannel.getUrls()));
    result.setWebPages(translateWebPages(ptvElectronicChannel.getWebPages()));
    return result;
  }

  // Translates one phone channel; same null/id-failure semantics as the other channel translators.
  protected PhoneChannel translatePhoneChannel(fi.otavanopisto.restfulptv.client.model.PhoneChannel ptvPhoneChannel) {
    if (ptvPhoneChannel == null) {
      return null;
    }

    OrganizationId organizationKuntaApiId = translateOrganizationId(ptvPhoneChannel.getOrganizationId());
    if (organizationKuntaApiId == null) {
      return null;
    }

    PhoneChannelId channelPtvId = new PhoneChannelId(PtvConsts.IDENTIFIFER_NAME, ptvPhoneChannel.getId());
    PhoneChannelId channelKuntaApiId = idController.translatePhoneServiceChannelId(channelPtvId, KuntaApiConsts.IDENTIFIER_NAME);
    if (channelKuntaApiId == null) {
      logger.severe(String.format("Could not translate phone channel id %s into Kunta API id", channelPtvId.getId()));
      return null;
    }

    PhoneChannel result = new PhoneChannel();
    result.setId(channelKuntaApiId.getId());
    result.setType(ptvPhoneChannel.getType());
    result.setOrganizationId(organizationKuntaApiId.getId());
    result.setNames(translateLocalizedItems(ptvPhoneChannel.getNames()));
    result.setDescriptions(translateLocalizedItems(ptvPhoneChannel.getDescriptions()));
    result.setPhoneType(ptvPhoneChannel.getPhoneType());
    result.setChargeTypes(ptvPhoneChannel.getChargeTypes());
    result.setSupportContacts(translateSupportContacts(ptvPhoneChannel.getSupportContacts()));
    result.setPhoneNumbers(translateLanguageItems(ptvPhoneChannel.getPhoneNumbers()));
    result.setLanguages(ptvPhoneChannel.getLanguages());
    result.setPhoneChargeDescriptions(translateLanguageItems(ptvPhoneChannel.getPhoneChargeDescriptions()));
    result.setWebPages(translateWebPages(ptvPhoneChannel.getWebPages()));
    result.setServiceHours(translateServiceHours(ptvPhoneChannel.getServiceHours()));
    result.setPublishingStatus(ptvPhoneChannel.getPublishingStatus());
    return result;
  }

  // Translates one printable-form channel; same null/id-failure semantics as above.
  protected PrintableFormChannel translatePrintableFormChannel(fi.otavanopisto.restfulptv.client.model.PrintableFormChannel ptvPrintableFormChannel) {
    if (ptvPrintableFormChannel == null) {
      return null;
    }

    OrganizationId organizationKuntaApiId = translateOrganizationId(ptvPrintableFormChannel.getOrganizationId());
    if (organizationKuntaApiId == null) {
      return null;
    }

    PrintableFormChannelId channelPtvId = new PrintableFormChannelId(PtvConsts.IDENTIFIFER_NAME, ptvPrintableFormChannel.getId());
    PrintableFormChannelId channelKuntaApiId = idController.translatePrintableFormServiceChannelId(channelPtvId, KuntaApiConsts.IDENTIFIER_NAME);
    if (channelKuntaApiId == null) {
      logger.severe(String.format("Could not translate printableForm channel id %s into Kunta API id", channelPtvId.getId()));
      return null;
    }

    PrintableFormChannel result = new PrintableFormChannel();
    result.setId(channelKuntaApiId.getId());
    result.setType(ptvPrintableFormChannel.getType());
    result.setOrganizationId(organizationKuntaApiId.getId());
    result.setNames(translateLocalizedItems(ptvPrintableFormChannel.getNames()));
    result.setDescriptions(translateLocalizedItems(ptvPrintableFormChannel.getDescriptions()));
    result.setFormIdentifier(ptvPrintableFormChannel.getFormIdentifier());
    result.setFormReceiver(ptvPrintableFormChannel.getFormReceiver());
    result.setSupportContacts(translateSupportContacts(ptvPrintableFormChannel.getSupportContacts()));
    result.setDeliveryAddress(translateAddress(ptvPrintableFormChannel.getDeliveryAddress()));
    result.setChannelUrls(translateLocalizedItems(ptvPrintableFormChannel.getChannelUrls()));
    result.setLanguages(ptvPrintableFormChannel.getLanguages());
    result.setDeliveryAddressDescriptions(translateLanguageItems(ptvPrintableFormChannel.getDeliveryAddressDescriptions()));
    result.setAttachments(translateAttachments(ptvPrintableFormChannel.getAttachments()));
    result.setWebPages(translateWebPages(ptvPrintableFormChannel.getWebPages()));
    result.setServiceHours(translateServiceHours(ptvPrintableFormChannel.getServiceHours()));
    result.setPublishingStatus(ptvPrintableFormChannel.getPublishingStatus());
    return result;
  }

  // Translates one service-location channel; same null/id-failure semantics as above.
  protected ServiceLocationChannel translateServiceLocationChannel(fi.otavanopisto.restfulptv.client.model.ServiceLocationChannel ptvServiceLocationChannel) {
    if (ptvServiceLocationChannel == null) {
      return null;
    }

    OrganizationId organizationKuntaApiId = translateOrganizationId(ptvServiceLocationChannel.getOrganizationId());
    if (organizationKuntaApiId == null) {
      return null;
    }

    ServiceLocationChannelId channelPtvId = new ServiceLocationChannelId(PtvConsts.IDENTIFIFER_NAME, ptvServiceLocationChannel.getId());
    ServiceLocationChannelId channelKuntaApiId = idController.translateServiceLocationChannelId(channelPtvId, KuntaApiConsts.IDENTIFIER_NAME);
    if (channelKuntaApiId == null) {
      logger.severe(String.format("Could not translate serviceLocation channel id %s into Kunta API id", channelPtvId.getId()));
      return null;
    }

    ServiceLocationChannel result = new ServiceLocationChannel();
    result.setId(channelKuntaApiId.getId());
    result.setType(ptvServiceLocationChannel.getType());
    result.setOrganizationId(organizationKuntaApiId.getId());
    result.setNames(translateLocalizedItems(ptvServiceLocationChannel.getNames()));
    result.setDescriptions(translateLocalizedItems(ptvServiceLocationChannel.getDescriptions()));
    result.setServiceAreaRestricted(ptvServiceLocationChannel.getServiceAreaRestricted());
    result.setSupportContacts(translateSupportContacts(ptvServiceLocationChannel.getSupportContacts()));
    result.setEmail(ptvServiceLocationChannel.getEmail());
    result.setPhone(ptvServiceLocationChannel.getPhone());
    result.setLanguages(ptvServiceLocationChannel.getLanguages());
    result.setFax(ptvServiceLocationChannel.getFax());
    result.setLatitude(ptvServiceLocationChannel.getLatitude());
    result.setLongitude(ptvServiceLocationChannel.getLongitude());
    result.setCoordinateSystem(ptvServiceLocationChannel.getCoordinateSystem());
    result.setCoordinatesSetManually(ptvServiceLocationChannel.getCoordinatesSetManually());
    result.setPhoneServiceCharge(ptvServiceLocationChannel.getPhoneServiceCharge());
    result.setWebPages(translateWebPages(ptvServiceLocationChannel.getWebPages()));
    result.setServiceAreas(ptvServiceLocationChannel.getServiceAreas());
    result.setPhoneChargeDescriptions(translateLanguageItems(ptvServiceLocationChannel.getPhoneChargeDescriptions()));
    result.setAddresses(translateAddresses(ptvServiceLocationChannel.getAddresses()));
    result.setChargeTypes(ptvServiceLocationChannel.getChargeTypes());
    result.setServiceHours(translateServiceHours(ptvServiceLocationChannel.getServiceHours()));
    result.setPublishingStatus(ptvServiceLocationChannel.getPublishingStatus());
    return result;
  }

  // Translates one web-page channel; same null/id-failure semantics as above.
  protected WebPageChannel translateWebPageChannel(fi.otavanopisto.restfulptv.client.model.WebPageChannel ptvWebPageChannel) {
    if (ptvWebPageChannel == null) {
      return null;
    }

    OrganizationId organizationKuntaApiId = translateOrganizationId(ptvWebPageChannel.getOrganizationId());
    if (organizationKuntaApiId == null) {
      return null;
    }

    WebPageChannelId channelPtvId = new WebPageChannelId(PtvConsts.IDENTIFIFER_NAME, ptvWebPageChannel.getId());
    WebPageChannelId channelKuntaApiId = idController.translateWebPageServiceChannelId(channelPtvId, KuntaApiConsts.IDENTIFIER_NAME);
    if (channelKuntaApiId == null) {
      logger.severe(String.format("Could not translate webPage channel id %s into Kunta API id", channelPtvId.getId()));
      return null;
    }

    WebPageChannel result = new WebPageChannel();
    result.setId(channelKuntaApiId.getId());
    result.setType(ptvWebPageChannel.getType());
    result.setOrganizationId(organizationKuntaApiId.getId());
    result.setNames(translateLocalizedItems(ptvWebPageChannel.getNames()));
    result.setDescriptions(translateLocalizedItems(ptvWebPageChannel.getDescriptions()));
    result.setUrls(translateLanguageItems(ptvWebPageChannel.getUrls()));
    result.setAttachments(translateAttachments(ptvWebPageChannel.getAttachments()));
    result.setSupportContacts(translateSupportContacts(ptvWebPageChannel.getSupportContacts()));
    result.setLanguages(ptvWebPageChannel.getLanguages());
    result.setWebPages(translateWebPages(ptvWebPageChannel.getWebPages()));
    result.setServiceHours(translateServiceHours(ptvWebPageChannel.getServiceHours()));
    result.setPublishingStatus(ptvWebPageChannel.getPublishingStatus());
    return result;
  }

  // Translates a list of web pages, dropping nulls. (Method continues past this excerpt.)
  protected List<WebPage> translateWebPages(List<fi.otavanopisto.restfulptv.client.model.WebPage> ptvWebPages) {
    if (ptvWebPages == null) {
      return Collections.emptyList();
    }

    List<WebPage> result = new ArrayList<>(ptvWebPages.size());
    for (fi.otavanopisto.restfulptv.client.model.WebPage ptvWebPage : ptvWebPages) {
      WebPage webPage = translateWebPage(ptvWebPage);
      if (webPage != null) {
        result.add(webPage);
} } return result; } protected WebPage translateWebPage(fi.otavanopisto.restfulptv.client.model.WebPage ptvWebPage) { if (ptvWebPage == null) { return null; } WebPage webPage = new WebPage(); webPage.setLanguage(ptvWebPage.getLanguage()); webPage.setType(ptvWebPage.getType()); webPage.setUrl(ptvWebPage.getUrl()); webPage.setValue(ptvWebPage.getValue()); webPage.setDescription(ptvWebPage.getDescription()); return webPage; } private OrganizationId translateOrganizationId(String ptvOrganizationId) { OrganizationId organizationPtvId = new OrganizationId(PtvConsts.IDENTIFIFER_NAME, ptvOrganizationId); OrganizationId organizationKuntaApiId = idController.translateOrganizationId(organizationPtvId, KuntaApiConsts.IDENTIFIER_NAME); if (organizationKuntaApiId == null) { logger.severe(String.format("Could not translate organization id %s into Kunta API id", organizationPtvId.getId())); return null; } return organizationKuntaApiId; } private List<Address> translateAddresses(List<fi.otavanopisto.restfulptv.client.model.Address> ptvAddresses) { if (ptvAddresses == null) { return Collections.emptyList(); } List<Address> result = new ArrayList<>(ptvAddresses.size()); for (fi.otavanopisto.restfulptv.client.model.Address ptvAddress : ptvAddresses) { Address address = translateAddress(ptvAddress); if (address != null) { result.add(address); } } return result; } private List<ServiceHour> translateServiceHours(List<fi.otavanopisto.restfulptv.client.model.ServiceHour> ptvServiceHours) { if (ptvServiceHours == null) { return Collections.emptyList(); } List<ServiceHour> result = new ArrayList<>(ptvServiceHours.size()); for (fi.otavanopisto.restfulptv.client.model.ServiceHour ptvServiceHour : ptvServiceHours) { ServiceHour serviceHour = translateServiceHour(ptvServiceHour); if (serviceHour != null) { result.add(serviceHour); } } return result; } private ServiceHour translateServiceHour(fi.otavanopisto.restfulptv.client.model.ServiceHour ptvServiceHour) { if (ptvServiceHour == null) { 
return null; } ServiceHour result = new ServiceHour(); result.setAdditionalInformation(translateLanguageItems(ptvServiceHour.getAdditionalInformation())); result.setCloses(ptvServiceHour.getCloses()); result.setDays(ptvServiceHour.getDays()); result.setOpens(ptvServiceHour.getOpens()); result.setStatus(ptvServiceHour.getStatus()); result.setType(ptvServiceHour.getType()); result.setValidFrom(ptvServiceHour.getValidFrom()); result.setValidTo(ptvServiceHour.getValidTo()); return result; } private List<SupportContact> translateSupportContacts(List<Support> ptvSupportContacts) { if (ptvSupportContacts == null) { return Collections.emptyList(); } List<SupportContact> result = new ArrayList<>(ptvSupportContacts.size()); for (Support ptvSupportContact : ptvSupportContacts) { SupportContact supportContact = translateSupportContact(ptvSupportContact); if (supportContact != null) { result.add(supportContact); } } return result; } private SupportContact translateSupportContact(Support ptvSupport) { if (ptvSupport == null) { return null; } SupportContact support = new SupportContact(); support.setEmail(ptvSupport.getEmail()); support.setLanguage(ptvSupport.getLanguage()); support.setPhone(ptvSupport.getPhone()); support.setPhoneChargeDescription(ptvSupport.getPhoneChargeDescription()); support.setServiceChargeTypes(ptvSupport.getServiceChargeTypes()); return support; } private List<ServiceChannelAttachment> translateAttachments(List<fi.otavanopisto.restfulptv.client.model.Attachment> ptvAttachments) { if (ptvAttachments == null) { return Collections.emptyList(); } List<ServiceChannelAttachment> result = new ArrayList<>(ptvAttachments.size()); for (fi.otavanopisto.restfulptv.client.model.Attachment ptvAttachment : ptvAttachments) { ServiceChannelAttachment attachment = translateAttachment(ptvAttachment); if (attachment != null) { result.add(attachment); } } return result; } private Address translateAddress(fi.otavanopisto.restfulptv.client.model.Address address) { if (address 
== null) { return null; } Address result = new Address(); result.setType(address.getType()); result.setPostOfficeBox(address.getPostOfficeBox()); result.setPostalCode(address.getPostalCode()); result.setPostOffice(address.getPostOffice()); result.setStreetAddress(translateLanguageItems(address.getStreetAddress())); result.setMunicipality(address.getMunicipality()); result.setCountry(address.getCountry()); result.setQualifier(address.getQualifier()); result.setAdditionalInformations(translateLanguageItems(address.getAdditionalInformations())); return result; } private ServiceChannelAttachment translateAttachment(fi.otavanopisto.restfulptv.client.model.Attachment ptvAttachment) { if (ptvAttachment == null) { return null; } ServiceChannelAttachment result = new ServiceChannelAttachment(); result.setDescription(ptvAttachment.getDescription()); result.setLanguage(ptvAttachment.getLanguage()); result.setName(ptvAttachment.getName()); result.setType(ptvAttachment.getType()); result.setUrl(ptvAttachment.getUrl()); return result; } }
package io.jenkins.plugins.generate.parsers;

import io.jenkins.plugins.generate.PluginDataParser;
import io.jenkins.plugins.models.Plugin;
import io.jenkins.plugins.models.SecurityWarning;
import io.jenkins.plugins.models.SecurityWarningVersion;
import org.json.JSONArray;
import org.json.JSONObject;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

/**
 * Attaches security warnings from the update-center JSON to each plugin.
 *
 * <p>The constructor indexes all plugin-type warnings by plugin name; {@link #parse}
 * then looks up the warnings for a plugin and marks each one active when the
 * plugin's current version matches any of the warning's version patterns.
 */
public class SecurityWarningsPluginDataParser implements PluginDataParser {

    /** Plugin name -> all plugin-type warnings declared for it in the update center. */
    private final Map<String, List<JSONObject>> nameToWarningsMap;

    public SecurityWarningsPluginDataParser(JSONObject updateCenterJson) {
        final JSONArray warningsJson = updateCenterJson.getJSONArray("warnings");
        // groupingBy replaces the original hand-rolled toMap + list-merge; result is
        // the same Map<String, List<JSONObject>> grouping.
        nameToWarningsMap = StreamSupport.stream(warningsJson.spliterator(), false)
            .map(JSONObject.class::cast)
            .filter(warning -> warning.getString("type").equalsIgnoreCase("plugin"))
            .collect(Collectors.groupingBy(warning -> warning.getString("name")));
    }

    /**
     * Sets the plugin's security warnings, if any are registered for its name.
     * Leaves the plugin untouched when no warnings exist.
     */
    @Override
    public void parse(JSONObject pluginJson, Plugin plugin) {
        final String name = pluginJson.getString("name");
        final String version = pluginJson.getString("version");
        final List<JSONObject> warnings = nameToWarningsMap.get(name);
        if (warnings == null) {
            return;
        }
        plugin.setSecurityWarnings(warnings.stream()
            .map(warningJson -> toSecurityWarning(warningJson, version))
            .collect(Collectors.toList()));
    }

    /**
     * Builds one {@link SecurityWarning} from its update-center JSON.
     * A warning is active when any of its version patterns matches {@code version}.
     * anyMatch replaces the original map-to-boolean + reduce(false, ||), which
     * always scanned every entry; anyMatch short-circuits on the first match.
     */
    private static SecurityWarning toSecurityWarning(JSONObject warningJson, String version) {
        final JSONArray versionsJson = warningJson.getJSONArray("versions");
        final List<SecurityWarningVersion> versions =
            StreamSupport.stream(versionsJson.spliterator(), false)
                .map(JSONObject.class::cast)
                .map(versionJson -> new SecurityWarningVersion(
                    versionJson.optString("firstVersion", null),
                    versionJson.optString("lastVersion", null)))
                .collect(Collectors.toList());
        final boolean active = StreamSupport.stream(versionsJson.spliterator(), false)
            .map(JSONObject.class::cast)
            .anyMatch(versionJson -> version.matches(versionJson.getString("pattern")));
        return new SecurityWarning(
            warningJson.getString("id"),
            warningJson.getString("message"),
            warningJson.getString("url"),
            active,
            versions);
    }
}
package jp.sf.fess.solr.plugin.analysis.monitor;

import java.io.IOException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;

import jp.sf.fess.solr.plugin.util.MonitoringFileUtil;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.util.AttributeSource;

/**
 * A TokenFilterFactory that wraps a delegate factory and rebuilds it whenever a
 * monitored file changes, so analyzer configuration (e.g. dictionaries) can be
 * reloaded without restarting. The wrapped TokenStream is swapped in lazily on
 * reset() when the factory timestamp is newer than the stream's.
 */
public class MonitoringTokenFilterFactory extends TokenFilterFactory implements
        ResourceLoaderAware {

    // debug switch: when true, stream swaps are printed to stdout
    private static final boolean VERBOSE = true; // debug

    protected ResourceLoader loader;

    // Delegate factory; replaced by the monitoring callback when the file changes.
    protected TokenFilterFactory baseTokenFilterFactory;

    // Copy of the original args used to (re)build the delegate factory.
    protected final Map<String, String> baseArgs;

    protected String baseClass;

    protected MonitoringFileTask monitoringFileTask;

    // Bumped every time the delegate factory is rebuilt; volatile so wrapper
    // streams on other threads observe the new value in reset().
    protected volatile long factoryTimestamp;

    // Reflective handles into AttributeSource private state, used to graft the
    // delegate stream's attributes onto the wrapper (see createTokenStream).
    protected Field attributesField;

    protected Field attributeImplsField;

    protected Field currentStateField;

    /**
     * Captures the AttributeSource private fields via reflection.
     * Fails fast with IllegalStateException if the Lucene internals changed.
     */
    public MonitoringTokenFilterFactory(final Map<String, String> args) {
        super(args);

        try {
            attributesField = AttributeSource.class
                    .getDeclaredField("attributes");
            attributesField.setAccessible(true);
            attributeImplsField = AttributeSource.class
                    .getDeclaredField("attributeImpls");
            attributeImplsField.setAccessible(true);
            currentStateField = AttributeSource.class
                    .getDeclaredField("currentState");
            currentStateField.setAccessible(true);
        } catch (final Exception e) {
            throw new IllegalStateException("Failed to load fields.", e);
        }

        baseArgs = new HashMap<String, String>(args);
    }

    /**
     * Builds the delegate factory and registers the file-monitoring task whose
     * callback rebuilds the factory and bumps factoryTimestamp on change.
     */
    @Override
    public void inform(final ResourceLoader loader) throws IOException {
        this.loader = loader;

        final Map<String, String> monitorArgs = MonitoringFileUtil
                .createMonitorArgs(baseArgs);
        baseClass = MonitoringFileUtil.initBaseArgs(baseArgs,
                luceneMatchVersion.toString());
        baseTokenFilterFactory = MonitoringFileUtil.createFactory(baseClass,
                baseArgs, loader);
        factoryTimestamp = System.currentTimeMillis();
        monitoringFileTask = MonitoringFileUtil.createMonitoringFileTask(
                monitorArgs, loader, new MonitoringFileTask.Callback() {
                    @Override
                    public void process() {
                        baseTokenFilterFactory = MonitoringFileUtil
                                .createFactory(baseClass, baseArgs, loader);
                        factoryTimestamp = System.currentTimeMillis();
                    }
                });
        if (baseTokenFilterFactory instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) baseTokenFilterFactory).inform(loader);
        }
    }

    @Override
    public TokenStream create(final TokenStream input) {
        return new TokenStreamWrapper(input);
    }

    /**
     * Delegating TokenStream that re-creates its inner stream from the current
     * factory whenever the factory has been rebuilt since this stream was made.
     */
    public class TokenStreamWrapper extends TokenStream {

        protected TokenStream tokenStream;

        protected TokenStream input;

        // Factory timestamp at the moment the inner stream was created.
        protected long tokenStreamTimestamp;

        TokenStreamWrapper(final TokenStream input) {
            super();
            this.input = input;
            tokenStream = createTokenStream(input);
        }

        @Override
        public void close() throws IOException {
            tokenStream.close();
        }

        /**
         * Swaps in a fresh inner stream if the factory was rebuilt, then resets
         * it and lets the monitoring task poll the watched file.
         */
        @Override
        public void reset() throws IOException {
            if (factoryTimestamp > tokenStreamTimestamp) {
                if (VERBOSE) {
                    System.out.println("Update TokenStream/" + baseClass + " ("
                            + tokenStreamTimestamp + "," + factoryTimestamp
                            + ")");
                }
                tokenStream = createTokenStream(input);
            }
            tokenStream.reset();
            monitoringFileTask.process();
        }

        @Override
        public boolean incrementToken() throws IOException {
            return tokenStream.incrementToken();
        }

        @Override
        public void end() throws IOException {
            tokenStream.end();
        }

        @Override
        public int hashCode() {
            return tokenStream.hashCode();
        }

        @Override
        public boolean equals(final Object obj) {
            return tokenStream.equals(obj);
        }

        @Override
        public String toString() {
            return tokenStream.toString();
        }

        /**
         * Creates the delegate stream and copies its AttributeSource internals
         * (attributes, attributeImpls, currentState) onto this wrapper via
         * reflection, so consumers reading attributes from the wrapper see the
         * delegate's attribute instances.
         */
        protected TokenStream createTokenStream(final TokenStream input) {
            tokenStreamTimestamp = factoryTimestamp;

            final TokenStream tokenStream = baseTokenFilterFactory
                    .create(input);

            try {
                final Object attributesObj = attributesField.get(tokenStream);
                attributesField.set(this, attributesObj);
                final Object attributeImplsObj = attributeImplsField
                        .get(tokenStream);
                attributeImplsField.set(this, attributeImplsObj);
                final Object currentStateObj = currentStateField
                        .get(tokenStream);
                currentStateField.set(this, currentStateObj);
            } catch (final Exception e) {
                throw new IllegalStateException(
                        "Failed to update the tokenStream.", e);
            }

            return tokenStream;
        }
    }
}
package org.jembi.rhea.transformers;

/**
 * Handle XDS ITI-43 Retrieve Document Set response
 */
import ihe.iti.atna.AuditMessage;
import ihe.iti.atna.EventIdentificationType;
import ihe.iti.xds_b._2007.RetrieveDocumentSetResponseType;
import ihe.iti.xds_b._2007.RetrieveDocumentSetResponseType.DocumentResponse;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import javax.xml.bind.JAXBException;

import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryErrorList;
import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryResponseType;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jembi.ihe.atna.ATNAUtil;
import org.jembi.ihe.atna.ATNAUtil.ParticipantObjectDetail;
import org.jembi.rhea.Constants;
import org.mule.api.MuleException;
import org.mule.api.MuleMessage;
import org.mule.api.transformer.TransformerException;
import org.mule.api.transport.PropertyScope;
import org.mule.module.client.MuleClient;
import org.mule.transformer.AbstractMessageTransformer;

/**
 * Mule transformer that unwraps the document content from an XDS.b ITI-43
 * Retrieve Document Set response (single response or a list of them) and
 * dispatches an ATNA audit message for each processed response.
 */
public class XDSRepositoryRetrieveDocumentSetResponse extends AbstractMessageTransformer {

    private Log log = LogFactory.getLog(this.getClass());

    // Repository endpoint configuration, injected via the setters below.
    private String xdsRepositoryHost = "";
    private String xdsRepositoryPath = "";
    private String xdsRepositoryPort = "";
    private String xdsRepositorySecurePort = "";
    private String iheSecure = "";
    private String requestedAssigningAuthority = "";

    //not thread safe...
    // NOTE(review): these are written per-message in getDocument() and read in
    // generateATNAMessage(); concurrent messages through one transformer
    // instance will interleave. Confirm the flow is single-threaded.
    private String _homeCommunityId;
    private String _docUniqueId;

    /**
     * Dispatches on the payload type: a single RetrieveDocumentSetResponseType
     * yields a singleton collection with the document text; a non-empty list of
     * them yields a list of document texts; anything else is logged and null is
     * returned.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Override
    public Object transformMessage(MuleMessage message, String outputEncoding)
            throws TransformerException {
        // process response
        if (message.getPayload() == null) {
            log.error("Null response received from XDS repository");
            return null;
        } else if (message.getPayload() instanceof RetrieveDocumentSetResponseType) {
            RetrieveDocumentSetResponseType response = (RetrieveDocumentSetResponseType) message.getPayload();
            return Collections.singleton(processResponse(message, response));
        } else if (message.getPayload() instanceof ArrayList && ((List) message.getPayload()).size() > 0) {
            if (!(((List) message.getPayload()).get(0) instanceof RetrieveDocumentSetResponseType)) {
                log.error("Unknown response type received from XDS repository in list: "
                        + ((List) message.getPayload()).get(0).getClass());
                return null;
            }
            // NOTE(review): unreachable — this branch already requires size() > 0.
            if (((List) message.getPayload()).isEmpty()) {
                log.info("No documents for patient in repository");
                return null;
            }

            List<RetrieveDocumentSetResponseType> responses = (List<RetrieveDocumentSetResponseType>) message.getPayload();
            List<String> res = new ArrayList<String>(responses.size());

            for (RetrieveDocumentSetResponseType response : responses)
                res.add(processResponse(message, response));

            return res;
        } else {
            log.error("Unknown response type received from XDS repository: "
                    + message.getPayload().getClass());
            return null;
        }
    }

    /**
     * Extracts the document from one response and, win or lose, attempts to
     * dispatch an ATNA audit message (audit failures are logged, never
     * propagated). Returns the document content.
     * NOTE(review): repositoryUniqueId is never assigned here, so the audit
     * message always omits the "Repository Unique Id" detail — likely meant to
     * be captured from getDocument(); verify.
     */
    @SuppressWarnings("unchecked")
    private String processResponse(MuleMessage message, RetrieveDocumentSetResponseType response)
            throws TransformerException {
        boolean outcome = false;
        String repositoryUniqueId = null;
        String document = null;

        try {
            // get a list of doc unique id separated by ":"
            document = getDocument(response);
            outcome = true;
        } finally {
            try {
                //generate audit message
                String request = ((Map<String, String>) message.getProperty(Constants.XDS_ITI_43, PropertyScope.SESSION)).get(_docUniqueId);
                String patientId = (String) message.getProperty(Constants.XDS_ITI_18_PATIENTID_PROPERTY, PropertyScope.SESSION);
                ATNAUtil.dispatchAuditMessage(muleContext, generateATNAMessage(request, patientId, repositoryUniqueId, outcome));
                log.info("Dispatched ATNA message");
            } catch (Exception e) {
                //If the auditing breaks, it shouldn't break the flow, so catch and log
                log.error("Failed to dispatch ATNA message", e);
            }
        }

        // return the content of the document
        return document;
    }

    /**
     * Pulls the document bytes out of the response, failing with a
     * TransformerException when the registry reported errors or a
     * DocumentResponse carries no document. Side effect: records
     * _homeCommunityId and _docUniqueId for the subsequent audit message.
     */
    private String getDocument(RetrieveDocumentSetResponseType drResponse) throws TransformerException {
        RegistryResponseType rrt = drResponse.getRegistryResponse();
        if (rrt != null && rrt.getRegistryErrorList() != null) {
            RegistryErrorList rel = rrt.getRegistryErrorList();
            if (rel != null && rel.getRegistryError() != null
                    && rel.getRegistryError().size() > 0 && rel.getRegistryError().get(0) != null) {
                throw new TransformerException(this, new Exception("TotalErrors: "
                        + rel.getRegistryError().size() + "FirstError: "
                        + rel.getRegistryError().get(0).getValue()));
            }
        }

        String status = (rrt == null ? "" : rrt.getStatus());
        // ??Shall we log this and other information(e.g. totalResultCnt, documentLength, mimeType, etc) anywhere??

        int totalResultCnt = 0;
        String document = null;
        List<DocumentResponse> drList = drResponse.getDocumentResponse(); // <ns2:DocumentResponse>
        if (drList != null && drList.size() > 0 && drList.get(0) != null) {
            totalResultCnt = drList.size();
            // may want to loop thru the results at some point, but for now......
            // NOTE(review): the loop overwrites document each iteration, so only
            // the last DocumentResponse's content is returned.
            for (DocumentResponse dr : drList) {
                _homeCommunityId = dr.getHomeCommunityId(); // <ns2:HomeCommunityId>urn:oid:1.3.6.1.4.1.12009.6.1</ns2:HomeCommunityId>
                String reposUniqueId = dr.getRepositoryUniqueId(); // <ns2:RepositoryUniqueId>1</ns2:RepositoryUniqueId>
                _docUniqueId = dr.getDocumentUniqueId(); // <ns2:DocumentUniqueId>1.123401.11111</ns2:DocumentUniqueId>
                String mimeType = dr.getMimeType(); // <ns2:mimeType>text/xml</ns2:mimeType>
                if (dr.getDocument() != null) {
                    // NOTE(review): platform-default charset conversion — confirm intended.
                    document = new String(dr.getDocument()); // <ns2:Document>VEVTVCBET0NVTUVOVCBDT05URU5U</ns2:Document>
                    int documentLength = dr.getDocument().length; // unused
                } else {
                    throw new TransformerException(this, new Exception("dr.getDocument() returns null!"));
                }
            }
        }

        return document;
    }

    /* Auditing */

    /**
     * Builds the ATNA audit message for an ITI-43 retrieve (DCM 110107
     * "Import", action "C"); outcome indicator is 0 on success, 4 on failure.
     */
    protected String generateATNAMessage(String request, String patientId,
            String repositoryUniqueId, boolean outcome) throws JAXBException {
        AuditMessage res = new AuditMessage();

        EventIdentificationType eid = new EventIdentificationType();
        eid.setEventID(ATNAUtil.buildCodedValueType("DCM", "110107", "Import"));
        eid.setEventActionCode("C");
        eid.setEventDateTime(ATNAUtil.newXMLGregorianCalendar());
        eid.getEventTypeCode().add(ATNAUtil.buildCodedValueType("IHE Transactions", "ITI-43", "Retrieve Document Set"));
        eid.setEventOutcomeIndicator(outcome ? BigInteger.ZERO : new BigInteger("4"));
        res.setEventIdentification(eid);

        res.getActiveParticipant().add(ATNAUtil.buildActiveParticipant(buildRepositoryPath(), xdsRepositoryHost, false, xdsRepositoryHost, (short) 1, "DCM", "110153", "Source"));
        res.getActiveParticipant().add(ATNAUtil.buildActiveParticipant(ATNAUtil.WSA_REPLYTO_ANON, ATNAUtil.getProcessID(), true, ATNAUtil.getHostIP(), (short) 2, "DCM", "110152", "Destination"));

        res.getAuditSourceIdentification().add(ATNAUtil.buildAuditSource());

        res.getParticipantObjectIdentification().add(
                ATNAUtil.buildParticipantObjectIdentificationType(String.format("%s^^^&%s&ISO", patientId, requestedAssigningAuthority), (short) 1, (short) 1, "RFC-3881", "2", "PatientNumber", null));

        // Optional details are only attached when the corresponding id is known.
        List<ParticipantObjectDetail> pod = new ArrayList<ParticipantObjectDetail>();
        if (repositoryUniqueId != null)
            pod.add(new ParticipantObjectDetail("Repository Unique Id", repositoryUniqueId.getBytes()));
        if (_homeCommunityId != null)
            pod.add(new ParticipantObjectDetail("ihe:homeCommunityID", _homeCommunityId.getBytes()));

        res.getParticipantObjectIdentification().add(
                ATNAUtil.buildParticipantObjectIdentificationType(_docUniqueId, (short) 2, (short) 3, "RFC-3881", "9", "Report Number", request, pod));

        return ATNAUtil.marshallATNAObject(res);
    }

    public String getXdsRepositoryHost() {
        return xdsRepositoryHost;
    }

    public void setXdsRepositoryHost(String xdsRepositoryHost) {
        this.xdsRepositoryHost = xdsRepositoryHost;
    }

    public String getRequestedAssigningAuthority() {
        return requestedAssigningAuthority;
    }

    public void setRequestedAssigningAuthority(String requestedAssigningAuthority) {
        this.requestedAssigningAuthority = requestedAssigningAuthority;
    }

    public String getXdsRepositoryPath() {
        return xdsRepositoryPath;
    }

    public void setXdsRepositoryPath(String xdsRepositoryPath) {
        this.xdsRepositoryPath = xdsRepositoryPath;
    }

    public String getXdsRepositoryPort() {
        return xdsRepositoryPort;
    }

    public void setXdsRepositoryPort(String xdsRepositoryPort) {
        this.xdsRepositoryPort = xdsRepositoryPort;
    }

    public String getXdsRepositorySecurePort() {
        return xdsRepositorySecurePort;
    }

    public void setXdsRepositorySecurePort(String xdsRepositorySecurePort) {
        this.xdsRepositorySecurePort = xdsRepositorySecurePort;
    }

    public String getIheSecure() {
        return iheSecure;
    }

    public void setIheSecure(String iheSecure) {
        this.iheSecure = iheSecure;
    }

    /** host:port/path, choosing the secure port when iheSecure is "true". */
    private String buildRepositoryPath() {
        return String.format("%s:%s/%s", xdsRepositoryHost,
                ((iheSecure.equalsIgnoreCase("true")) ? xdsRepositorySecurePort : xdsRepositoryPort),
                xdsRepositoryPath);
    }
}
package org.jenkins_ci.plugins.run_condition.holiday;

import hudson.Extension;
import hudson.init.InitMilestone;
import hudson.init.Initializer;
import hudson.util.CopyOnWriteMap;

import java.util.*;

/**
 * Computes German nationwide public holidays (fixed dates plus the
 * Easter-derived movable feasts) and caches them per year.
 */
@Extension
public class GermanHolidayCalculator {

    // Cache of computed holidays keyed by year.
    // NOTE(review): plain HashMap mutated lazily from isHoliday(); confirm
    // single-threaded access or switch to a concurrent map.
    private static Map<Integer, List<Calendar>> publicHolidays = new HashMap<Integer, List<Calendar>>();

    private static GermanHolidayCalculator germanHolidayCalculator = null;

    public GermanHolidayCalculator() {
        super();
    }

    /**
     * Pre-computes holidays for the current year and the following five years.
     *
     * Bug fix: the original mutated the loop base ({@code year = year + i}),
     * which cached years N, N+1, N+3, N+6, N+10, N+15 instead of six
     * consecutive years.
     */
    @Initializer(after = InitMilestone.PLUGINS_STARTED)
    public static void init() {
        if (germanHolidayCalculator == null) {
            germanHolidayCalculator = new GermanHolidayCalculator();
        }
        final int currentYear = Calendar.getInstance().get(Calendar.YEAR);
        for (int i = 0; i < 6; i++) {
            final int year = currentYear + i;
            publicHolidays.put(year, germanHolidayCalculator.calculatePublicHolidays(year));
        }
    }

    /** Lazily-created singleton accessor. */
    public static GermanHolidayCalculator getInstance() {
        if (germanHolidayCalculator == null) {
            germanHolidayCalculator = new GermanHolidayCalculator();
        }
        return germanHolidayCalculator;
    }

    /**
     * Returns whether the given date is a public holiday, computing and caching
     * the year's holiday list on first use.
     */
    public boolean isHoliday(Calendar date) {
        int year = date.get(Calendar.YEAR);
        if (!publicHolidays.containsKey(year)) {
            publicHolidays.put(year, calculatePublicHolidays(year));
        }
        return isHoliday(date, publicHolidays.get(year));
    }

    /** Returns whether the given date matches (year, month, day) any entry in {@code holidays}. */
    public boolean isHoliday(Calendar date, List<Calendar> holidays) {
        for (Calendar cal : holidays) {
            if (cal.get(Calendar.YEAR) == date.get(Calendar.YEAR)
                    && cal.get(Calendar.MONTH) == date.get(Calendar.MONTH)
                    && cal.get(Calendar.DAY_OF_MONTH) == date.get(Calendar.DAY_OF_MONTH)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Calculates Easter Sunday, the base for several other public holidays in
     * Germany (Gregorian/Gaussian computus; valid from 1583 onward).
     *
     * @param year Gregorian year, must be &gt; 1582
     * @return Easter Sunday of that year
     * @throws IllegalArgumentException for years before 1583
     */
    public Calendar calculateEasterSunday(int year) {
        if (year <= 1582) {
            throw new IllegalArgumentException(
                    "Algorithm invalid before April 1583");
        }
        int golden, century, x, z, d, epact, n;
        golden = (year % 19) + 1;            /* E1: metonic cycle */
        century = (year / 100) + 1;          /* E2: e.g. 1984 was in 20th C */
        x = (3 * century / 4) - 12;          /* E3: leap year correction */
        z = ((8 * century + 5) / 25) - 5;    /* E3: sync with moon's orbit */
        d = (5 * year / 4) - x - 10;
        epact = (11 * golden + 20 + z - x) % 30; /* E5: epact */
        if ((epact == 25 && golden > 11) || epact == 24)
            epact++;
        n = 44 - epact;
        n += 30 * (n < 21 ? 1 : 0);
        n += 7 - ((d + n) % 7);
        if (n > 31)
            return new GregorianCalendar(year, 4 - 1, n - 31); /* April */
        else
            return new GregorianCalendar(year, 3 - 1, n);      /* March */
    }

    /**
     * Returns a list of all nationwide public holidays in Germany for the year:
     * New Year, Good Friday, Easter Sunday/Monday, Ascension, Whit
     * Sunday/Monday, 1 May, 3 October, 25/26 December.
     */
    public List<Calendar> calculatePublicHolidays(int year) {
        List<Calendar> holidays = new ArrayList<Calendar>();

        Calendar gc_ostersonntag = this.calculateEasterSunday(year);
        holidays.add(gc_ostersonntag);
        Calendar gc_neujahr = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), 0, 1);
        holidays.add(gc_neujahr);
        Calendar gc_karfreitag = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), gc_ostersonntag.get(Calendar.MONTH), (gc_ostersonntag.get(Calendar.DATE) - 2));
        holidays.add(gc_karfreitag);
        Calendar gc_ostermontag = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), gc_ostersonntag.get(Calendar.MONTH), (gc_ostersonntag.get(Calendar.DATE) + 1));
        holidays.add(gc_ostermontag);
        Calendar gc_christihimmelfahrt = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), gc_ostersonntag.get(Calendar.MONTH), (gc_ostersonntag.get(Calendar.DATE) + 39));
        holidays.add(gc_christihimmelfahrt);
        Calendar gc_pfingstsonntag = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), gc_ostersonntag.get(Calendar.MONTH), (gc_ostersonntag.get(Calendar.DATE) + 49));
        holidays.add(gc_pfingstsonntag);
        Calendar gc_pfingstmontag = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), gc_ostersonntag.get(Calendar.MONTH), (gc_ostersonntag.get(Calendar.DATE) + 50));
        holidays.add(gc_pfingstmontag);
        // not an official german holiday...
        // Calendar gc_frohnleichnahm = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), gc_ostersonntag.get(Calendar.MONTH), (gc_ostersonntag.get(Calendar.DATE) + 60));
        // holidays.add(gc_frohnleichnahm);
        Calendar gc_erstemai = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), 4, 1);
        holidays.add(gc_erstemai);
        Calendar gc_wiedervereinigung = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), 9, 3);
        holidays.add(gc_wiedervereinigung);
        Calendar gc_weihnachten_2 = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), 11, 25);
        holidays.add(gc_weihnachten_2);
        Calendar gc_weihnachten_3 = new GregorianCalendar(gc_ostersonntag.get(Calendar.YEAR), 11, 26);
        holidays.add(gc_weihnachten_3);

        return holidays;
    }
}
package org.jenkinsci.plugins.authorizeproject; import hudson.Extension; import hudson.model.AbstractProject; import hudson.model.Queue; import javax.annotation.CheckForNull; import org.acegisecurity.Authentication; import org.kohsuke.stapler.DataBoundConstructor; import jenkins.security.QueueItemAuthenticatorConfiguration; import jenkins.security.QueueItemAuthenticatorDescriptor; import jenkins.security.QueueItemAuthenticator; /** * Authorize builds of projects configured with {@link AuthorizeProjectProperty}. */ public class ProjectQueueItemAuthenticator extends QueueItemAuthenticator { @DataBoundConstructor public ProjectQueueItemAuthenticator() { } /** * @param item * @return * @see jenkins.security.QueueItemAuthenticator#authenticate(hudson.model.Queue.Item) */ @Override @CheckForNull public Authentication authenticate(Queue.Item item) { if (!(item.task instanceof AbstractProject)) { // This handles only AbstractProject. return null; } AbstractProject<?, ?> project = ((AbstractProject<?,?>)item.task).getRootProject(); AuthorizeProjectProperty prop = project.getProperty(AuthorizeProjectProperty.class); if (prop == null) { return null; } return prop.authenticate(item); } @Extension public static class DescriptorImpl extends QueueItemAuthenticatorDescriptor { /** * @return the name shown in the security configuration page. * @see hudson.model.Descriptor#getDisplayName() */ @Override public String getDisplayName() { return Messages.ProjectQueueItemAuthenticator_DisplayName(); } } /** * @return whether Jenkins is configured to use {@link ProjectQueueItemAuthenticator}. */ public static boolean isConfigured() { for (QueueItemAuthenticator authenticator: QueueItemAuthenticatorConfiguration.get().getAuthenticators()) { if (authenticator instanceof ProjectQueueItemAuthenticator) { return true; } } return false; } }
package org.sagebionetworks.web.client.widget.entity.renderer;

import static org.sagebionetworks.web.shared.EntityBundleTransport.ENTITY;

import java.util.ArrayList;
import java.util.List;

import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.EntityGroupRecord;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.LocationData;
import org.sagebionetworks.repo.model.Locationable;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.Reference;
import org.sagebionetworks.repo.model.Versionable;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.web.client.DisplayConstants;
import org.sagebionetworks.web.client.DisplayUtils;
import org.sagebionetworks.web.client.SynapseClientAsync;
import org.sagebionetworks.web.client.SynapseJSNIUtils;
import org.sagebionetworks.web.client.model.EntityBundle;
import org.sagebionetworks.web.client.transform.NodeModelCreator;
import org.sagebionetworks.web.client.widget.entity.EntityGroupRecordDisplay;
import org.sagebionetworks.web.client.widget.entity.registration.WidgetEncodingUtil;
import org.sagebionetworks.web.shared.EntityBundleTransport;
import org.sagebionetworks.web.shared.WikiPageKey;
import org.sagebionetworks.web.shared.exceptions.ForbiddenException;
import org.sagebionetworks.web.shared.exceptions.NotFoundException;
import org.sagebionetworks.web.shared.exceptions.UnauthorizedException;
import org.sagebionetworks.web.shared.exceptions.UnknownErrorException;

import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.client.rpc.AsyncCallback;

/**
 * This utility class holds common presenter logic for the EntityListWidget and EntityListConfigEditor
 * @author dburdick
 *
 */
public class EntityListUtil {

	// Descriptions longer than this are truncated and suffixed with " ..." for display.
	private static final int MAX_DESCRIPTION_CHARS = 165;
	// Serialization format: records are joined with LIST_DELIMITER; within a record,
	// the entity-version string and the (encoded) note are joined with NOTE_DELIMITER.
	private final static String NOTE_DELIMITER = ",";
	private final static String LIST_DELIMITER = ";";

	/** Callback invoked once a row's display object has been built (or a failure display created). */
	public interface RowLoadedHandler {
		public void onLoaded(EntityGroupRecordDisplay entityGroupRecordDisplay);
	}

	/**
	 * Asynchronously loads the entity bundle for the record at rowIndex and hands a
	 * populated EntityGroupRecordDisplay to the handler. On any load failure a
	 * placeholder "error" display is delivered instead (see createFailureDisplay).
	 *
	 * @param synapseClient client used to fetch the entity bundle
	 * @param synapseJSNIUtils used to build file download URLs
	 * @param nodeModelCreator converts the transport object into an EntityBundle
	 * @param isLoggedIn whether the current user is authenticated (affects download links)
	 * @param records the full list of records backing the widget
	 * @param rowIndex index of the record to load
	 * @param handler receives the resulting display
	 * @throws IllegalArgumentException if records is null or rowIndex is out of range
	 */
	public static void loadIndividualRowDetails(
			final SynapseClientAsync synapseClient,
			final SynapseJSNIUtils synapseJSNIUtils,
			final NodeModelCreator nodeModelCreator,
			final boolean isLoggedIn, List<EntityGroupRecord> records,
			final int rowIndex, final RowLoadedHandler handler)
			throws IllegalArgumentException {
		if(records == null || rowIndex >= records.size()) {
			throw new IllegalArgumentException();
		}
		final EntityGroupRecord record = records.get(rowIndex);
		// Records without a reference are silently skipped: there is nothing to load.
		if(record == null) return;
		final Reference ref = record.getEntityReference();
		if(ref == null) return;
		AsyncCallback<EntityBundleTransport> callback = new AsyncCallback<EntityBundleTransport>() {
			@Override
			public void onSuccess(EntityBundleTransport result) {
				EntityBundle bundle = null;
				try {
					bundle = nodeModelCreator.createEntityBundle(result);
					// Old behavior: use the entity's own description field.
					handler.onLoaded(createRecordDisplay(isLoggedIn, bundle, record,
							synapseJSNIUtils, bundle.getEntity().getDescription()));
					// NOTE(review): the alternative below pulls the description from the
					// entity's wiki (for non-Locationable entities) instead of the
					// description field. It was intentionally disabled ("temporarily
					// reverting to old behavior"); markdown is also not processed by it.
					// if (bundle.getEntity() instanceof Locationable) {
					//     handler.onLoaded(createRecordDisplay(isLoggedIn, bundle, record,
					//             synapseJSNIUtils, bundle.getEntity().getDescription()));
					// } else {
					//     createDisplayWithWikiDescription(
					//             synapseClient, synapseJSNIUtils,
					//             isLoggedIn, handler, bundle, record, ref);
				} catch (JSONObjectAdapterException e) {
					onFailure(new UnknownErrorException(DisplayConstants.ERROR_INCOMPATIBLE_CLIENT_VERSION));
				}
			}
			@Override
			public void onFailure(Throwable caught) {
				createFailureDisplay(caught, ref, handler);
			}
		};
		int mask = ENTITY;
		if(ref.getTargetVersionNumber() != null) {
			synapseClient.getEntityBundleForVersion(ref.getTargetId(), ref.getTargetVersionNumber(), mask, callback);
		} else {
			// failsafe: no pinned version, load the current version of the entity
			synapseClient.getEntityBundle(ref.getTargetId(), mask, callback);
		}
	}

	/**
	 * Gets a plain text description from the wiki associated with the Entity of the given
	 * bundle. Creates a record display with that description. A missing wiki
	 * (NotFoundException) yields an empty description; other failures yield a
	 * failure display.
	 *
	 * Note: access modifier public for unit test
	 */
	public static void createDisplayWithWikiDescription(
			final SynapseClientAsync synapseClient,
			final SynapseJSNIUtils synapseJSNIUtils,
			final boolean isLoggedIn, final RowLoadedHandler handler,
			final EntityBundle bundle, final EntityGroupRecord record,
			final Reference ref) {
		String entityId = bundle.getEntity().getId();
		String objectType = ObjectType.ENTITY.toString();
		// null wiki id refers to the entity's root wiki page.
		WikiPageKey key = new WikiPageKey(entityId, objectType, null);
		synapseClient.getPlainTextWikiPage(key, new AsyncCallback<String>() {
			@Override
			public void onSuccess(String resultDesc) {
				try {
					handler.onLoaded(createRecordDisplay(isLoggedIn, bundle, record, synapseJSNIUtils, resultDesc));
				} catch (JSONObjectAdapterException e) {
					onFailure(new UnknownErrorException(DisplayConstants.ERROR_INCOMPATIBLE_CLIENT_VERSION));
				}
			}
			@Override
			public void onFailure(Throwable caught) {
				if (caught instanceof NotFoundException) {
					// No wiki found. Put in blank description.
					try {
						handler.onLoaded(createRecordDisplay(isLoggedIn, bundle, record, synapseJSNIUtils, ""));
					} catch (JSONObjectAdapterException e) {
						onFailure(new UnknownErrorException(DisplayConstants.ERROR_INCOMPATIBLE_CLIENT_VERSION));
					}
				} else {
					createFailureDisplay(caught, ref, handler);
				}
			}
		});
	}

	/**
	 * Serializes the records into a single string of the form
	 * "entityVersion[,encodedNote];entityVersion[,encodedNote];...".
	 * Records with a null reference are skipped. Inverse of {@link #parseRecords(String)}.
	 */
	public static String recordsToString(List<EntityGroupRecord> records) {
		// add record to descriptor
		String recordStr = "";
		if(records == null) return recordStr;
		for(EntityGroupRecord record : records) {
			Reference ref = record.getEntityReference();
			if(ref == null) continue;
			if(!recordStr.equals("")) recordStr += LIST_DELIMITER;
			recordStr += DisplayUtils.createEntityVersionString(ref.getTargetId(), ref.getTargetVersionNumber());
			String note = record.getNote();
			if(note != null && !note.equals("")) {
				// Notes are encoded so a note containing the delimiters round-trips safely.
				recordStr += NOTE_DELIMITER + WidgetEncodingUtil.encodeValue(note);
			}
		}
		return recordStr;
	}

	/**
	 * Parses a string produced by {@link #recordsToString(List)} back into records.
	 * Unparseable entries are skipped; a null/empty input yields an empty list.
	 */
	public static List<EntityGroupRecord> parseRecords(String recordStr) {
		List<EntityGroupRecord> records = new ArrayList<EntityGroupRecord>();
		if(recordStr == null || "".equals(recordStr)) return records;
		String[] entries = recordStr.split(LIST_DELIMITER);
		for(String entry : entries) {
			String[] parts = entry.split(NOTE_DELIMITER);
			// NOTE(review): String.split never returns a zero-length array for a
			// non-null input, so this guard is effectively dead — harmless, kept as-is.
			if(parts.length <= 0) continue;
			EntityGroupRecord record = new EntityGroupRecord();
			if(parts[0] != null && !"".equals(parts[0])) {
				Reference ref = DisplayUtils.parseEntityVersionString(parts[0]);
				if(ref == null) continue;
				record.setEntityReference(ref);
			}
			if(parts.length > 1) {
				record.setNote(WidgetEncodingUtil.decodeValue(parts[1]));
			}
			if(record.getEntityReference() != null) records.add(record);
		}
		return records;
	}

	/*
	 * Private methods
	 */

	/**
	 * Builds the display object for one row: name link, download URL (depends on
	 * login state and entity type), version label, truncated HTML-escaped
	 * description, and HTML-escaped note.
	 */
	private static EntityGroupRecordDisplay createRecordDisplay(
			boolean isLoggedIn, EntityBundle bundle,
			EntityGroupRecord record, SynapseJSNIUtils synapseJSNIUtils,
			String description)
			throws JSONObjectAdapterException {
		Entity referencedEntity = bundle.getEntity();

		// Link to the specific version if the entity is versionable.
		String nameLinkUrl;
		if(referencedEntity instanceof Versionable) {
			nameLinkUrl = DisplayUtils.getSynapseHistoryTokenNoHash(referencedEntity.getId(), ((Versionable)referencedEntity).getVersionNumber());
		} else {
			nameLinkUrl = DisplayUtils.getSynapseHistoryTokenNoHash(referencedEntity.getId());
		}

		// download: anonymous users get a link to the entity page instead of a direct download.
		String downloadUrl = null;
		if(!isLoggedIn) {
			if(bundle.getEntity() instanceof FileEntity || bundle.getEntity() instanceof Locationable)
				downloadUrl = "#" + nameLinkUrl;
		} else if(referencedEntity instanceof Locationable) {
			// Legacy Locationable entities: use the first location's path, if any.
			List<LocationData> locations = ((Locationable) referencedEntity).getLocations();
			if(locations != null && locations.size() > 0) {
				downloadUrl = locations.get(0).getPath();
			}
		} else if(referencedEntity instanceof FileEntity) {
			downloadUrl = DisplayUtils.createFileEntityUrl(synapseJSNIUtils.getBaseFileHandleUrl(), referencedEntity.getId(), ((FileEntity) referencedEntity).getVersionNumber(), false);
		}

		// version: "N/A" for non-versionable entities.
		String version = "N/A";
		if(referencedEntity instanceof Versionable) {
			version = DisplayUtils.getVersionDisplay((Versionable)referencedEntity);
		}

		// desc: flatten newlines, truncate, and escape for safe HTML rendering.
		if (description == null) description = "";
		description = description.replaceAll("\\n", " "); // keep to 3 lines by removing new lines
		if(description.length() > MAX_DESCRIPTION_CHARS)
			description = description.substring(0, MAX_DESCRIPTION_CHARS) + " ...";
		SafeHtml descSafe = new SafeHtmlBuilder().appendEscapedLines(description).toSafeHtml();

		// note: escape user-entered note text.
		SafeHtml noteSafe = record.getNote() == null ? SafeHtmlUtils.fromSafeConstant("")
				: new SafeHtmlBuilder().appendEscapedLines(record.getNote()).toSafeHtml();

		return new EntityGroupRecordDisplay(
				referencedEntity.getId(),
				SafeHtmlUtils.fromString(referencedEntity.getName()),
				nameLinkUrl, downloadUrl, descSafe,
				SafeHtmlUtils.fromString(version),
				referencedEntity.getModifiedOn(),
				referencedEntity.getCreatedBy() == null ? "" : referencedEntity.getCreatedBy(),
				noteSafe);
	}

	/**
	 * Builds a placeholder display describing why the row could not be loaded
	 * (unauthorized / not found / generic error) and delivers it to the handler.
	 */
	private static void createFailureDisplay(Throwable caught, Reference ref, final RowLoadedHandler handler) {
		EntityGroupRecordDisplay errorDisplay = getEmptyDisplay();
		errorDisplay.setEntityId(ref.getTargetId());
		String versionNumber = ref.getTargetVersionNumber() == null ? "" : ref.getTargetVersionNumber().toString();
		errorDisplay.setVersion(SafeHtmlUtils.fromSafeConstant(versionNumber));
		String msg = ref.getTargetId();
		if(ref.getTargetVersionNumber() != null) msg += ", Version " + versionNumber;
		if(caught instanceof UnauthorizedException || caught instanceof ForbiddenException) {
			errorDisplay.setName(SafeHtmlUtils.fromSafeConstant(DisplayConstants.TITLE_UNAUTHORIZED + ": " + msg));
		} else if (caught instanceof NotFoundException) {
			errorDisplay.setName(SafeHtmlUtils.fromSafeConstant(DisplayConstants.NOT_FOUND + ": " + msg));
		} else {
			errorDisplay.setName(SafeHtmlUtils.fromSafeConstant(DisplayConstants.ERROR_LOADING + ": " + msg));
		}
		handler.onLoaded(errorDisplay);
	}

	/** @return a display with all fields blank, used as the base for failure displays. */
	private static EntityGroupRecordDisplay getEmptyDisplay() {
		return new EntityGroupRecordDisplay(
				"", SafeHtmlUtils.EMPTY_SAFE_HTML, null, null,
				SafeHtmlUtils.EMPTY_SAFE_HTML, SafeHtmlUtils.EMPTY_SAFE_HTML,
				null, "", SafeHtmlUtils.EMPTY_SAFE_HTML);
	}

}
package org.spongepowered.common.mixin.core.inventory;

import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.item.ItemStack;
import net.minecraft.util.NonNullList;
import org.spongepowered.asm.mixin.Implements;
import org.spongepowered.asm.mixin.Interface;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.item.inventory.adapter.InventoryAdapter;
import org.spongepowered.common.item.inventory.adapter.impl.MinecraftInventoryAdapter;
import org.spongepowered.common.item.inventory.lens.Fabric;
import org.spongepowered.common.item.inventory.lens.Lens;
import org.spongepowered.common.item.inventory.lens.LensProvider;
import org.spongepowered.common.item.inventory.lens.SlotProvider;
import org.spongepowered.common.item.inventory.lens.impl.collections.SlotCollection;
import org.spongepowered.common.item.inventory.lens.impl.comp.OrderedInventoryLensImpl;
import org.spongepowered.common.item.inventory.lens.impl.fabric.DefaultInventoryFabric;

/**
 * Mixin into the vanilla crafting grid inventory. Implements the Sponge
 * inventory-adapter accessors (prefixed {@code inventory$} so the mixin
 * processor soft-implements {@link MinecraftInventoryAdapter}) on top of a
 * lens/fabric view of the crafting slots.
 */
@Mixin(InventoryCrafting.class)
@Implements(value = @Interface(iface = MinecraftInventoryAdapter.class, prefix = "inventory$"))
public abstract class MixinInventoryCrafting implements IInventory, LensProvider<IInventory, ItemStack> {

    // Backing stack list of the vanilla crafting inventory (shadowed, not redefined).
    @Shadow private NonNullList<ItemStack> stackList;

    // Sponge inventory view state, initialized once construction finishes (see onConstructed).
    protected Fabric<IInventory> fabric;
    protected SlotCollection slots;
    protected Lens<IInventory, ItemStack> lens;

    /**
     * Runs at the end of the vanilla constructor so {@link #stackList} is already
     * sized; builds the fabric, slot collection, and root lens for this inventory.
     */
    @Inject(method = "<init>", at = @At("RETURN"))
    public void onConstructed(CallbackInfo ci) {
        this.fabric = new DefaultInventoryFabric(this);
        this.slots = new SlotCollection.Builder().add(this.stackList.size()).build();
        this.lens = getRootLens(fabric, ((InventoryAdapter) this));
    }

    /**
     * Builds the root lens over the crafting slots as a single ordered row.
     *
     * @param fabric the inventory fabric (unused here; the lens is built from slot metadata)
     * @param adapter the adapter requesting the lens
     * @return an ordered lens over all slots, or {@code null} for a zero-slot inventory
     */
    @Override
    public Lens<IInventory, ItemStack> getRootLens(Fabric<IInventory> fabric, InventoryAdapter<IInventory, ItemStack> adapter) {
        if (this.stackList.size() == 0) {
            return null; // No Lens when inventory has no slots
        }
        return new OrderedInventoryLensImpl(0, this.stackList.size(), 1, this.slots);
    }

    // Soft-implemented MinecraftInventoryAdapter accessors (prefix "inventory$").

    public SlotProvider<IInventory, ItemStack> inventory$getSlotProvider() {
        return this.slots;
    }

    public Lens<IInventory, ItemStack> inventory$getRootLens() {
        return this.lens;
    }

    public Fabric<IInventory> inventory$getInventory() {
        return this.fabric;
    }

}
package uk.ac.ebi.ddi.annotation.service.dataset;

import com.mongodb.BasicDBObject;
import org.kohsuke.rngom.parse.host.Base;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.aggregation.*;
import org.springframework.data.mongodb.core.query.Criteria;
import uk.ac.ebi.ddi.annotation.utils.Constants;
import uk.ac.ebi.ddi.annotation.utils.DatasetUtils;
import uk.ac.ebi.ddi.annotation.utils.Utils;
import uk.ac.ebi.ddi.ebe.ws.dao.model.common.QueryResult;
import uk.ac.ebi.ddi.service.db.model.aggregate.BaseAggregate;
import uk.ac.ebi.ddi.service.db.model.dataset.Dataset;
import uk.ac.ebi.ddi.service.db.model.dataset.DatasetSimilars;
import uk.ac.ebi.ddi.service.db.model.dataset.DatasetStatus;
import uk.ac.ebi.ddi.service.db.model.dataset.SimilarDataset;
import uk.ac.ebi.ddi.service.db.model.publication.PublicationDataset;
import uk.ac.ebi.ddi.service.db.service.dataset.IDatasetService;
import uk.ac.ebi.ddi.service.db.service.dataset.IDatasetSimilarsService;
import uk.ac.ebi.ddi.service.db.service.dataset.IDatasetStatusService;
import uk.ac.ebi.ddi.service.db.service.logger.IHttpEventService;
import uk.ac.ebi.ddi.service.db.service.publication.IPublicationDatasetService;
import uk.ac.ebi.ddi.service.db.utils.DatasetCategory;
import uk.ac.ebi.ddi.service.db.utils.DatasetSimilarsType;
import uk.ac.ebi.ddi.service.db.utils.Tuple;
import uk.ac.ebi.ddi.xml.validator.parser.model.Entry;
import uk.ac.ebi.ddi.xml.validator.utils.Field;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Service that keeps the dataset collection in sync with incoming XML entries:
 * inserting/updating datasets, tracking their status history, and maintaining
 * publication links and dataset-similarity relations.
 *
 * @author Yasset Perez-Riverol (ypriverol@gmail.com)
 * @date 05/05/2016
 */
public class DDIDatasetAnnotationService {

    @Autowired
    IDatasetService datasetService;

    @Autowired
    IDatasetStatusService statusService;

    @Autowired
    IPublicationDatasetService publicationService;

    @Autowired
    IDatasetSimilarsService similarsService;

    @Autowired
    IHttpEventService httpEventService;

    private static final Logger LOGGER = LoggerFactory.getLogger(DDIDatasetAnnotationService.class);

    /**
     * This function looks for individual datasets and check if they are in the database and if they needs to
     * be updated. Uses the repository name carried by the entry itself.
     *
     * @param dataset XML entry to import
     */
    @Deprecated
    public void insertDataset(Entry dataset) {
        Dataset dbDataset = DatasetUtils.transformEntryDataset(dataset);
        upsertDataset(dbDataset);
    }

    /**
     * This insert uses a fixed database name and not the one provided by the entry.
     *
     * @param dataset Dataset Entry from the XML
     * @param databaseName database name provided by the users
     */
    public void insertDataset(Entry dataset, String databaseName) {
        Dataset dbDataset = DatasetUtils.transformEntryDataset(dataset, databaseName);
        upsertDataset(dbDataset);
    }

    /**
     * Shared insert-or-update path for both {@code insertDataset(Entry...)} overloads:
     * normalizes text case, then inserts a new dataset or updates an existing one
     * whose content hash changed.
     */
    private void upsertDataset(Dataset dbDataset) {
        dbDataset = Utils.replaceTextCase(dbDataset);
        Dataset currentDataset = datasetService.read(dbDataset.getAccession(), dbDataset.getDatabase());
        if (currentDataset == null) {
            insertDataset(dbDataset);
        } else if (currentDataset.getInitHashCode() != dbDataset.getInitHashCode()) {
            updateDataset(currentDataset, dbDataset);
        }
    }

    /** Replaces an existing dataset with new content and records a status entry. */
    private void updateDataset(Dataset currentDataset, Dataset dbDataset) {
        dbDataset = datasetService.update(currentDataset.getId(), dbDataset);
        if (dbDataset.getId() != null) {
            // NOTE(review): the status recorded here is INSERTED even though this is an
            // update path; DatasetCategory.UPDATED exists (see annotateDataset) — confirm
            // whether INSERTED is intentional before changing.
            statusService.save(new DatasetStatus(dbDataset.getAccession(), dbDataset.getDatabase(),
                    dbDataset.getInitHashCode(), getDate(), DatasetCategory.INSERTED.getType()));
        }
    }

    /**
     * Marks a dataset as UPDATED (unless deleted), persists it, and records one
     * PublicationDataset link per PubMed cross-reference.
     */
    public void annotateDataset(Dataset exitingDataset) {
        if (!exitingDataset.getCurrentStatus().equalsIgnoreCase(DatasetCategory.DELETED.getType())) {
            exitingDataset.setCurrentStatus(DatasetCategory.UPDATED.getType());
        }
        datasetService.update(exitingDataset.getId(), exitingDataset);
        if (exitingDataset.getCrossReferences() != null) {
            // Hoisted: the PubMed cross-reference list is needed for both the check and the loop.
            Collection<String> pubmedIds =
                    DatasetUtils.getCrossReferenceFieldValue(exitingDataset, Field.PUBMED.getName());
            if (!pubmedIds.isEmpty()) {
                for (String pubmedId : pubmedIds) {
                    //Todo: In the future we need to check for providers that have multiple omics already.
                    publicationService.save(new PublicationDataset(pubmedId, exitingDataset.getAccession(),
                            exitingDataset.getDatabase(),
                            DatasetUtils.getFirstAdditionalFieldValue(exitingDataset, Field.OMICS.getName())));
                }
            }
        }
    }

    /** @return all publication-dataset links currently stored. */
    public List<PublicationDataset> getPublicationDatasets() {
        return publicationService.readAll();
    }

    /** Marks a dataset as ENRICHED (unless deleted) and persists it. */
    public void enrichedDataset(Dataset existingDataset) {
        if (!existingDataset.getCurrentStatus().equalsIgnoreCase(DatasetCategory.DELETED.getType())) {
            existingDataset.setCurrentStatus(DatasetCategory.ENRICHED.getType());
        }
        datasetService.update(existingDataset.getId(), existingDataset);
    }

    /** Re-reads the dataset by id and transitions it to DELETED, recording a status entry. */
    public void updateDeleteStatus(Dataset dataset) {
        Dataset existingDataset = datasetService.read(dataset.getId());
        updateStatus(existingDataset, DatasetCategory.DELETED.getType());
    }

    /** Persists a status transition on the dataset and appends a DatasetStatus record. */
    private void updateStatus(Dataset dbDataset, String status) {
        dbDataset.setCurrentStatus(status);
        dbDataset = datasetService.update(dbDataset.getId(), dbDataset);
        if (dbDataset.getId() != null) {
            statusService.save(new DatasetStatus(dbDataset.getAccession(), dbDataset.getDatabase(),
                    dbDataset.getInitHashCode(), getDate(), status));
        }
    }

    /** @return datasets (with hash codes) for the given database. */
    public List<Dataset> getAllDatasetsByDatabase(String databaseName) {
        return datasetService.readDatasetHashCode(databaseName);
    }

    /** @return the dataset with the given accession in the given database, or null. */
    public Dataset getDataset(String accession, String database) {
        return datasetService.read(accession, database);
    }

    /**
     * This function transform an Entry in the XML file into a dataset in the database
     * and add then to the database, recording an INSERTED status entry.
     *
     * @param dbDataset the dataset to persist
     */
    private void insertDataset(Dataset dbDataset) {
        dbDataset = datasetService.save(dbDataset);
        if (dbDataset.getId() != null) {
            statusService.save(new DatasetStatus(dbDataset.getAccession(), dbDataset.getDatabase(),
                    dbDataset.getInitHashCode(), getDate(), DatasetCategory.INSERTED.getType()));
        }
    }

    /**
     * @return the stored content hash for the entry's dataset, or null if the
     *         dataset is not in the database yet.
     */
    public Integer findDataset(Entry dataset) {
        Dataset dbDataset = datasetService.read(dataset.getAcc(), dataset.getRepository());
        if (dbDataset != null) {
            return dbDataset.getInitHashCode();
        }
        return null;
    }

    /** @return today's date formatted as yyyy/MM/dd (used for status timestamps). */
    private String getDate() {
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd");
        return dateFormat.format(new java.util.Date());
    }

    /** Persists in-place modifications to an existing dataset. */
    public void updateDataset(Dataset dataset) {
        datasetService.update(dataset.getId(), dataset);
    }

    /**
     * Find a dataset by the Accession
     * @param dbKey db accession
     * @return List of Datasets.
     */
    public List<Dataset> getDataset(String dbKey) {
        return datasetService.findByAccession(dbKey);
    }

    /** Replaces (or creates) the full similarity set for the given dataset. */
    public void updateDatasetSimilars(String accession, String database, Set<SimilarDataset> similars) {
        DatasetSimilars datasetExisting = similarsService.read(accession, database);
        if (datasetExisting == null) {
            datasetExisting = new DatasetSimilars(accession, database, similars);
        } else {
            datasetExisting.setSimilars(similars);
        }
        similarsService.save(datasetExisting);
    }

    /**
     * Adds similarity links of the given type from publication-based relations.
     * Relations pointing back at the dataset itself are skipped.
     */
    public void addDatasetSimilars(Dataset dataset, Set<PublicationDataset> related, String type) {
        DatasetSimilars datasetExisting = similarsService.read(dataset.getAccession(), dataset.getDatabase());
        Set<SimilarDataset> similarDatasets = new HashSet<>();
        for (PublicationDataset publicationDataset : related) {
            // NOTE(review): this requires the relation to differ in BOTH accession AND
            // database, so a different dataset in the same database is skipped; compare
            // addGEODatasetSimilars, which checks accession only — confirm intent.
            if (!publicationDataset.getDatasetID().equalsIgnoreCase(dataset.getAccession())
                    && !publicationDataset.getDatabase().equalsIgnoreCase(dataset.getDatabase())) {
                Dataset datasetRelated = datasetService.read(publicationDataset.getDatasetID(),
                        publicationDataset.getDatabase());
                if (datasetRelated != null) {
                    SimilarDataset similar = new SimilarDataset(datasetRelated, type);
                    similarDatasets.add(similar);
                }
            }
        }
        if (similarDatasets.size() == 0) {
            LOGGER.warn("Adding related datasets to {} with type " + type
                    + ", but none of them were in our database {}", dataset.getAccession(), related);
            return;
        }
        saveSimilars(datasetExisting, dataset.getAccession(), dataset.getDatabase(), similarDatasets);
    }

    /**
     * GEO variant of {@link #addDatasetSimilars(Dataset, Set, String)}: also resolves
     * related datasets through their secondary accessions when no primary match exists.
     */
    public void addGEODatasetSimilars(Dataset dataset, Set<PublicationDataset> related, String type) {
        DatasetSimilars datasetExisting = similarsService.read(dataset.getAccession(), dataset.getDatabase());
        Set<SimilarDataset> similarDatasets = new HashSet<>();
        for (PublicationDataset publicationDataset : related) {
            if (!publicationDataset.getDatasetID().equalsIgnoreCase(dataset.getAccession())) {
                Dataset datasetRelated = datasetService.read(publicationDataset.getDatasetID(),
                        publicationDataset.getDatabase());
                if (datasetRelated == null) {
                    // Fall back to secondary-accession lookup (GEO ids are often secondary).
                    List<Dataset> secondaries =
                            datasetService.getBySecondaryAccession(publicationDataset.getDatasetID());
                    for (Dataset secondary : secondaries) {
                        similarDatasets.add(new SimilarDataset(secondary, type));
                    }
                } else {
                    similarDatasets.add(new SimilarDataset(datasetRelated, type));
                }
            }
        }
        if (similarDatasets.size() == 0) {
            LOGGER.warn("Adding related datasets to {} with type " + type
                    + ", but none of them were in our database {}", dataset.getAccession(),
                    related.stream().map(PublicationDataset::getDatasetID).collect(Collectors.toList()));
            return;
        }
        saveSimilars(datasetExisting, dataset.getAccession(), dataset.getDatabase(), similarDatasets);
        LOGGER.info("Added some new related datasets with type " + type + " to {}, {}", dataset.getAccession(),
                similarDatasets.stream().map(x -> x.getSimilarDataset().getAccession()).collect(Collectors.toList()));
    }

    /** Adds a single similarity link to the dataset identified by accession/database. */
    public void addDatasetSimilars(String accession, String database, SimilarDataset similarDataset) {
        DatasetSimilars datasetExisting = similarsService.read(accession, database);
        if (datasetExisting == null) {
            datasetExisting = new DatasetSimilars(accession, database, similarDataset);
            similarsService.save(datasetExisting);
        } else {
            Set<SimilarDataset> similars = datasetExisting.getSimilars();
            similars.add(similarDataset);
            datasetExisting.setSimilars(similars);
            similarsService.save(datasetExisting);
        }
    }

    /**
     * Registers reanalysis relations: this dataset is REANALYSIS_OF each resolvable
     * target, and reciprocally each target is REANALYZED_BY this dataset.
     *
     * @param similarsMap map of database name to the accessions reanalyzed by this dataset
     */
    public void addDatasetReanalysisSimilars(Dataset dataset, Map<String, Set<String>> similarsMap) {
        DatasetSimilars datasetExisting = similarsService.read(dataset.getAccession(), dataset.getDatabase());
        Set<SimilarDataset> similarDatasets = new HashSet<>();
        // Typed iteration replaces the original raw Map.Entry + unchecked casts.
        for (Map.Entry<String, Set<String>> entry : similarsMap.entrySet()) {
            String databaseKey = entry.getKey();
            for (String value : entry.getValue()) {
                // Skip self-references.
                if (!(databaseKey.equalsIgnoreCase(dataset.getDatabase())
                        && value.equalsIgnoreCase(dataset.getAccession()))) {
                    Dataset datasetRelated = datasetService.read(value, databaseKey);
                    if (datasetRelated != null) {
                        SimilarDataset similar =
                                new SimilarDataset(datasetRelated, DatasetSimilarsType.REANALYSIS_OF.getType());
                        SimilarDataset similar2 =
                                new SimilarDataset(dataset, DatasetSimilarsType.REANALYZED_BY.getType());
                        similarDatasets.add(similar);
                        // Record the reciprocal link on the related dataset.
                        addDatasetSimilars(datasetRelated.getAccession(), datasetRelated.getDatabase(), similar2);
                    }
                }
            }
        }
        saveSimilars(datasetExisting, dataset.getAccession(), dataset.getDatabase(), similarDatasets);
    }

    /**
     * Merges the given similarity links into an existing DatasetSimilars record,
     * or creates a new record when none exists. Shared tail of the add*Similars methods.
     */
    private void saveSimilars(DatasetSimilars existing, String accession, String database,
                              Set<SimilarDataset> similarDatasets) {
        if (existing == null) {
            similarsService.save(new DatasetSimilars(accession, database, similarDatasets));
        } else {
            Set<SimilarDataset> similars = existing.getSimilars();
            similars.addAll(similarDatasets);
            existing.setSimilars(similars);
            similarsService.save(existing);
        }
    }

    /** @return all stored similarity records. */
    public List<DatasetSimilars> getDatasetSimilars() {
        return similarsService.readAll();
    }

    /** Deletes a similarity record. */
    public void removeSimilar(DatasetSimilars dataset) {
        similarsService.delete(dataset);
    }

    /**
     * Finds datasets that share a PubMed id across two or more omics types
     * ("multiomics") via a Mongo aggregation:
     * match non-deleted entries with a pubmed cross-reference → unwind pubmed →
     * group by pubmed collecting (accession, database) pairs and omics types →
     * keep groups with &gt;= 2 omics types → unwind back to one row per dataset.
     *
     * @return one PublicationDataset per (pubmed, dataset) pair in a multiomics group
     */
    public List<PublicationDataset> getMultiomics() {
        MatchOperation checkPubmedNull = Aggregation.match(new Criteria("crossReferences.pubmed").exists(true)
                .andOperator(new Criteria("currentStatus").ne("Deleted")));
        UnwindOperation unwindPubMed = Aggregation.unwind("crossReferences.pubmed");
        GroupOperation groupPubmed = Aggregation.group("crossReferences.pubmed")
                .addToSet(new BasicDBObject("ac", "$accession").append("db", "$database")).as("datasets")
                .addToSet("additional.omics_type").as("omics_list").count().as("count");
        ProjectionOperation projectStage = Aggregation.project("_id", "count", "datasets", "omics_list")
                .and("omics_list").size().as("omics_count").and("_id").as("pubmedId");
        MatchOperation checkMultiomics = Aggregation.match(new Criteria("omics_count").gte(2));
        UnwindOperation unwindDatasets = Aggregation.unwind("datasets");
        ProjectionOperation projectAsPublication = Aggregation.project("pubmedId")
                .and("datasets.ac").as("accession")
                .and("datasets.db").as("database").andExclude("_id");
        Aggregation aggregation = Aggregation.newAggregation(checkPubmedNull, unwindPubMed, groupPubmed,
                projectStage, checkMultiomics, unwindDatasets, projectAsPublication);
        return datasetService.getAggregationResults(aggregation, "datasets.dataset", PublicationDataset.class);
    }

    /** Marks datasets from the configured source repositories as claimable. */
    public void updateDatasetClaim() {
        String[] sourceDatasets = {Constants.PRIDE_DATABASE, Constants.METABOLIGHTS_DATABASE,
                Constants.METABOLOME_DATABASE, Constants.ARRAYEXPRESS_DATABASE,
                Constants.MASSIVE_DATABASE, Constants.JPOST_DATABASE};
        datasetService.updateDatasetClaim(sourceDatasets);
    }

    /** Refreshes the "most accessed datasets" aggregate (top 20). */
    public void updateMostAccessed() {
        httpEventService.moreAccessedDataset(20);
    }

    /** @return how many datasets were merged into the given accession. */
    public long getMergedDatasetCount(String database, String accession) {
        return datasetService.getMergedDatasetCount(database, accession);
    }

    /** Flags the private datasets of the given database. */
    public void updatePrivateDataset(String database) {
        datasetService.updatePrivateDatasets(database);
    }

    /** Triggers retrieval of the private datasets of the given database. */
    public void getPrivateDatasets(String database) {
        datasetService.getPrivateDatasets(database);
    }
}