// NOTE(review): removed a stray extraction artifact ("answer", "stringlengths",
// "17", "10.2M") that preceded the package declaration and was not valid Java.
package com.yahoo.vespa.model.admin.monitoring; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import static com.yahoo.vespa.model.admin.monitoring.DefaultVespaMetrics.defaultVespaMetricSet; import static java.util.Collections.singleton; /** * Encapsulates vespa service metrics. * * @author gjoranv */ public class VespaMetricSet { public static final MetricSet vespaMetricSet = new MetricSet("vespa", getVespaMetrics(), singleton(defaultVespaMetricSet)); private static Set<Metric> getVespaMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); metrics.addAll(getSearchNodeMetrics()); metrics.addAll(getStorageMetrics()); metrics.addAll(getDocprocMetrics()); metrics.addAll(getClusterControllerMetrics()); metrics.addAll(getQrserverMetrics()); metrics.addAll(getContainerMetrics()); metrics.addAll(getConfigServerMetrics()); metrics.addAll(getSentinelMetrics()); metrics.addAll(getOtherMetrics()); return Collections.unmodifiableSet(metrics); } private static Set<Metric> getSentinelMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); metrics.add(new Metric("sentinel.restarts.count")); metrics.add(new Metric("sentinel.totalRestarts.last")); metrics.add(new Metric("sentinel.uptime.last")); metrics.add(new Metric("sentinel.running.count")); metrics.add(new Metric("sentinel.running.last")); return metrics; } private static Set<Metric> getOtherMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); metrics.add(new Metric("slobrok.heartbeats.failed.count")); metrics.add(new Metric("logd.processed.lines.count")); // Java (JRT) TLS metrics metrics.add(new Metric("jrt.transport.tls-certificate-verification-failures")); metrics.add(new Metric("jrt.transport.peer-authorization-failures")); metrics.add(new Metric("jrt.transport.server.tls-connections-established")); metrics.add(new Metric("jrt.transport.client.tls-connections-established")); metrics.add(new 
Metric("jrt.transport.server.unencrypted-connections-established")); metrics.add(new Metric("jrt.transport.client.unencrypted-connections-established")); // C++ TLS metrics metrics.add(new Metric("vds.server.network.tls-handshakes-failed")); metrics.add(new Metric("vds.server.network.peer-authorization-failures")); metrics.add(new Metric("vds.server.network.client.tls-connections-established")); metrics.add(new Metric("vds.server.network.server.tls-connections-established")); metrics.add(new Metric("vds.server.network.client.insecure-connections-established")); metrics.add(new Metric("vds.server.network.server.insecure-connections-established")); metrics.add(new Metric("vds.server.network.tls-connections-broken")); metrics.add(new Metric("vds.server.network.failed-tls-config-reloads")); // C++ Fnet metrics metrics.add(new Metric("vds.server.fnet.num-connections")); return metrics; } private static Set<Metric> getConfigServerMetrics() { Set<Metric> metrics =new LinkedHashSet<>(); metrics.add(new Metric("configserver.requests.count")); metrics.add(new Metric("configserver.failedRequests.count")); metrics.add(new Metric("configserver.latency.max")); metrics.add(new Metric("configserver.latency.sum")); metrics.add(new Metric("configserver.latency.count")); metrics.add(new Metric("configserver.latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("configserver.cacheConfigElems.last")); metrics.add(new Metric("configserver.cacheChecksumElems.last")); metrics.add(new Metric("configserver.hosts.last")); metrics.add(new Metric("configserver.delayedResponses.count")); metrics.add(new Metric("configserver.sessionChangeErrors.count")); metrics.add(new Metric("configserver.zkZNodes.last")); metrics.add(new Metric("configserver.zkAvgLatency.last")); metrics.add(new Metric("configserver.zkMaxLatency.last")); metrics.add(new Metric("configserver.zkConnections.last")); metrics.add(new Metric("configserver.zkOutstandingRequests.last")); return metrics; } private 
static Set<Metric> getContainerMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); addMetric(metrics, "jdisc.http.requests", List.of("rate", "count")); metrics.add(new Metric("handled.requests.count")); metrics.add(new Metric("handled.latency.max")); metrics.add(new Metric("handled.latency.sum")); metrics.add(new Metric("handled.latency.count")); metrics.add(new Metric("handled.latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverRejectedRequests.rate")); metrics.add(new Metric("serverRejectedRequests.count")); metrics.add(new Metric("serverThreadPoolSize.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverThreadPoolSize.min")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverThreadPoolSize.max")); metrics.add(new Metric("serverThreadPoolSize.rate")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverThreadPoolSize.count")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverThreadPoolSize.last")); metrics.add(new Metric("serverActiveThreads.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverActiveThreads.min")); metrics.add(new Metric("serverActiveThreads.max")); metrics.add(new Metric("serverActiveThreads.rate")); // TODO: Remove in Vespa 8 metrics.add(new Metric("serverActiveThreads.sum")); metrics.add(new Metric("serverActiveThreads.count")); metrics.add(new Metric("serverActiveThreads.last")); metrics.add(new Metric("serverNumOpenConnections.average")); metrics.add(new Metric("serverNumOpenConnections.max")); metrics.add(new Metric("serverNumOpenConnections.last")); metrics.add(new Metric("serverNumConnections.average")); metrics.add(new Metric("serverNumConnections.max")); metrics.add(new Metric("serverNumConnections.last")); { List<String> suffices = List.of("sum", "count", "last", "min", "max"); addMetric(metrics, "jdisc.thread_pool.unhandled_exceptions", suffices); addMetric(metrics, "jdisc.thread_pool.work_queue.capacity", suffices); addMetric(metrics, 
"jdisc.thread_pool.work_queue.size", suffices); } metrics.add(new Metric("httpapi_latency.max")); metrics.add(new Metric("httpapi_latency.sum")); metrics.add(new Metric("httpapi_latency.count")); metrics.add(new Metric("httpapi_latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("httpapi_pending.max")); metrics.add(new Metric("httpapi_pending.sum")); metrics.add(new Metric("httpapi_pending.count")); metrics.add(new Metric("httpapi_pending.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("httpapi_num_operations.rate")); metrics.add(new Metric("httpapi_num_updates.rate")); metrics.add(new Metric("httpapi_num_removes.rate")); metrics.add(new Metric("httpapi_num_puts.rate")); metrics.add(new Metric("httpapi_succeeded.rate")); metrics.add(new Metric("httpapi_failed.rate")); metrics.add(new Metric("httpapi_parse_error.rate")); metrics.add(new Metric("mem.heap.total.average")); metrics.add(new Metric("mem.heap.free.average")); metrics.add(new Metric("mem.heap.used.average")); metrics.add(new Metric("mem.heap.used.max")); metrics.add(new Metric("jdisc.memory_mappings.max")); metrics.add(new Metric("jdisc.open_file_descriptors.max")); metrics.add(new Metric("jdisc.gc.count.average")); metrics.add(new Metric("jdisc.gc.count.max")); metrics.add(new Metric("jdisc.gc.count.last")); metrics.add(new Metric("jdisc.gc.ms.average")); metrics.add(new Metric("jdisc.gc.ms.max")); metrics.add(new Metric("jdisc.gc.ms.last")); metrics.add(new Metric("jdisc.deactivated_containers.total.last")); metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last")); metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last")); metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate")); metrics.add(new Metric("http.status.1xx.rate")); metrics.add(new Metric("http.status.2xx.rate")); metrics.add(new Metric("http.status.3xx.rate")); metrics.add(new Metric("http.status.4xx.rate")); metrics.add(new Metric("http.status.5xx.rate")); 
metrics.add(new Metric("http.status.401.rate")); metrics.add(new Metric("http.status.403.rate")); metrics.add(new Metric("jdisc.http.request.uri_length.max")); metrics.add(new Metric("jdisc.http.request.uri_length.sum")); metrics.add(new Metric("jdisc.http.request.uri_length.count")); metrics.add(new Metric("jdisc.http.request.uri_length.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("jdisc.http.request.content_size.max")); metrics.add(new Metric("jdisc.http.request.content_size.sum")); metrics.add(new Metric("jdisc.http.request.content_size.count")); metrics.add(new Metric("jdisc.http.request.content_size.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("jdisc.http.ssl.handshake.failure.missing_client_cert.rate")); metrics.add(new Metric("jdisc.http.ssl.handshake.failure.expired_client_cert.rate")); metrics.add(new Metric("jdisc.http.ssl.handshake.failure.invalid_client_cert.rate")); metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_protocols.rate")); metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_ciphers.rate")); metrics.add(new Metric("jdisc.http.ssl.handshake.failure.unknown.rate")); metrics.add(new Metric("jdisc.http.handler.unhandled_exceptions.rate")); addMetric(metrics, "jdisc.http.jetty.threadpool.thread.max", List.of("last")); addMetric(metrics, "jdisc.http.jetty.threadpool.thread.reserved", List.of("last")); addMetric(metrics, "jdisc.http.jetty.threadpool.thread.busy", List.of("sum", "count", "min", "max")); addMetric(metrics, "jdisc.http.jetty.threadpool.thread.total", List.of("sum", "count", "min", "max")); addMetric(metrics, "jdisc.http.jetty.threadpool.queue.size", List.of("sum", "count", "min", "max")); return metrics; } private static Set<Metric> getClusterControllerMetrics() { Set<Metric> metrics =new LinkedHashSet<>(); metrics.add(new Metric("cluster-controller.down.count.last")); metrics.add(new Metric("cluster-controller.initializing.count.last")); metrics.add(new 
Metric("cluster-controller.maintenance.count.last")); metrics.add(new Metric("cluster-controller.retired.count.last")); metrics.add(new Metric("cluster-controller.stopping.count.last")); metrics.add(new Metric("cluster-controller.up.count.last")); metrics.add(new Metric("cluster-controller.cluster-state-change.count")); metrics.add(new Metric("cluster-controller.is-master.last")); // TODO(hakonhall): Update this name once persistent "count" metrics has been implemented. // DO NOT RELY ON THIS METRIC YET. metrics.add(new Metric("cluster-controller.node-event.count")); return metrics; } private static Set<Metric> getDocprocMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); // per chain metrics.add(new Metric("documents_processed.rate")); return metrics; } private static Set<Metric> getQrserverMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); metrics.add(new Metric("peak_qps.max")); metrics.add(new Metric("search_connections.max")); metrics.add(new Metric("search_connections.sum")); metrics.add(new Metric("search_connections.count")); metrics.add(new Metric("search_connections.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("active_queries.max")); metrics.add(new Metric("active_queries.sum")); metrics.add(new Metric("active_queries.count")); metrics.add(new Metric("active_queries.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("feed.latency.max")); metrics.add(new Metric("feed.latency.sum")); metrics.add(new Metric("feed.latency.count")); metrics.add(new Metric("feed.latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("feed.http-requests.count")); metrics.add(new Metric("feed.http-requests.rate")); metrics.add(new Metric("queries.rate")); metrics.add(new Metric("query_container_latency.max")); metrics.add(new Metric("query_container_latency.sum")); metrics.add(new Metric("query_container_latency.count")); metrics.add(new Metric("query_container_latency.average")); // TODO: Remove in Vespa 8 metrics.add(new 
Metric("query_latency.max")); metrics.add(new Metric("query_latency.sum")); metrics.add(new Metric("query_latency.count")); metrics.add(new Metric("query_latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("query_latency.95percentile")); metrics.add(new Metric("query_latency.99percentile")); metrics.add(new Metric("failed_queries.rate")); metrics.add(new Metric("degraded_queries.rate")); metrics.add(new Metric("hits_per_query.max")); metrics.add(new Metric("hits_per_query.sum")); metrics.add(new Metric("hits_per_query.count")); metrics.add(new Metric("hits_per_query.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("query_hit_offset.max")); metrics.add(new Metric("query_hit_offset.sum")); metrics.add(new Metric("query_hit_offset.count")); metrics.add(new Metric("documents_covered.count")); metrics.add(new Metric("documents_total.count")); metrics.add(new Metric("dispatch_internal.rate")); metrics.add(new Metric("dispatch_fdispatch.rate")); metrics.add(new Metric("totalhits_per_query.max")); metrics.add(new Metric("totalhits_per_query.sum")); metrics.add(new Metric("totalhits_per_query.count")); metrics.add(new Metric("totalhits_per_query.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("empty_results.rate")); metrics.add(new Metric("requestsOverQuota.rate")); metrics.add(new Metric("requestsOverQuota.count")); metrics.add(new Metric("relevance.at_1.sum")); metrics.add(new Metric("relevance.at_1.count")); metrics.add(new Metric("relevance.at_1.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("relevance.at_3.sum")); metrics.add(new Metric("relevance.at_3.count")); metrics.add(new Metric("relevance.at_3.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("relevance.at_10.sum")); metrics.add(new Metric("relevance.at_10.count")); metrics.add(new Metric("relevance.at_10.average")); // TODO: Remove in Vespa 8 // Errors from qrserver metrics.add(new Metric("error.timeout.rate")); metrics.add(new 
Metric("error.backends_oos.rate")); metrics.add(new Metric("error.plugin_failure.rate")); metrics.add(new Metric("error.backend_communication_error.rate")); metrics.add(new Metric("error.empty_document_summaries.rate")); metrics.add(new Metric("error.invalid_query_parameter.rate")); metrics.add(new Metric("error.internal_server_error.rate")); metrics.add(new Metric("error.misconfigured_server.rate")); metrics.add(new Metric("error.invalid_query_transformation.rate")); metrics.add(new Metric("error.result_with_errors.rate")); metrics.add(new Metric("error.unspecified.rate")); metrics.add(new Metric("error.unhandled_exception.rate")); return metrics; } private static void addSearchNodeExecutorMetrics(Set<Metric> metrics, String prefix) { metrics.add(new Metric(prefix + ".queuesize.max")); metrics.add(new Metric(prefix + ".queuesize.sum")); metrics.add(new Metric(prefix + ".queuesize.count")); metrics.add(new Metric(prefix + ".maxpending.last")); // TODO: Remove in Vespa 8 metrics.add(new Metric(prefix + ".accepted.rate")); } private static Set<Metric> getSearchNodeMetrics() { Set<Metric> metrics = new LinkedHashSet<>(); metrics.add(new Metric("content.proton.documentdb.documents.total.last")); metrics.add(new Metric("content.proton.documentdb.documents.ready.last")); metrics.add(new Metric("content.proton.documentdb.documents.active.last")); metrics.add(new Metric("content.proton.documentdb.documents.removed.last")); metrics.add(new Metric("content.proton.documentdb.index.docs_in_memory.last")); metrics.add(new Metric("content.proton.documentdb.disk_usage.last")); metrics.add(new Metric("content.proton.documentdb.memory_usage.allocated_bytes.max")); metrics.add(new Metric("content.proton.transport.query.count.rate")); metrics.add(new Metric("content.proton.docsum.docs.rate")); metrics.add(new Metric("content.proton.docsum.latency.max")); metrics.add(new Metric("content.proton.docsum.latency.sum")); metrics.add(new Metric("content.proton.docsum.latency.count")); 
metrics.add(new Metric("content.proton.docsum.latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.transport.query.latency.max")); metrics.add(new Metric("content.proton.transport.query.latency.sum")); metrics.add(new Metric("content.proton.transport.query.latency.count")); metrics.add(new Metric("content.proton.transport.query.latency.average")); // TODO: Remove in Vespa 8 // Search protocol metrics.add(new Metric("content.proton.search_protocol.query.latency.max")); metrics.add(new Metric("content.proton.search_protocol.query.latency.sum")); metrics.add(new Metric("content.proton.search_protocol.query.latency.count")); metrics.add(new Metric("content.proton.search_protocol.query.request_size.max")); metrics.add(new Metric("content.proton.search_protocol.query.request_size.sum")); metrics.add(new Metric("content.proton.search_protocol.query.request_size.count")); metrics.add(new Metric("content.proton.search_protocol.query.reply_size.max")); metrics.add(new Metric("content.proton.search_protocol.query.reply_size.sum")); metrics.add(new Metric("content.proton.search_protocol.query.reply_size.count")); metrics.add(new Metric("content.proton.search_protocol.docsum.latency.max")); metrics.add(new Metric("content.proton.search_protocol.docsum.latency.sum")); metrics.add(new Metric("content.proton.search_protocol.docsum.latency.count")); metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.max")); metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.sum")); metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.count")); metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.max")); metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.sum")); metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.count")); metrics.add(new Metric("content.proton.search_protocol.docsum.requested_documents.count")); // Executors shared 
between all document dbs addSearchNodeExecutorMetrics(metrics, "content.proton.executor.proton"); addSearchNodeExecutorMetrics(metrics, "content.proton.executor.flush"); addSearchNodeExecutorMetrics(metrics, "content.proton.executor.match"); addSearchNodeExecutorMetrics(metrics, "content.proton.executor.docsum"); addSearchNodeExecutorMetrics(metrics, "content.proton.executor.shared"); addSearchNodeExecutorMetrics(metrics, "content.proton.executor.warmup"); // jobs metrics.add(new Metric("content.proton.documentdb.job.total.average")); metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average")); metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average")); metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average")); metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average")); metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average")); metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average")); metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average")); metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average")); // Threading service (per document db) addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.master"); addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index"); addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.summary"); addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index_field_inverter"); addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index_field_writer"); addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.attribute_field_writer"); // lid space metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average")); metrics.add(new 
Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average")); metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average")); metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average")); metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average")); metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average")); metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last")); metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last")); metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last")); // resource usage metrics.add(new Metric("content.proton.resource_usage.disk.average")); metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average")); metrics.add(new Metric("content.proton.resource_usage.memory.average")); metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average")); metrics.add(new Metric("content.proton.resource_usage.transient_memory.average")); metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max")); metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max")); metrics.add(new Metric("content.proton.resource_usage.feeding_blocked.max")); metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.enum_store.average")); metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.multi_value.average")); metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.max")); // transaction log metrics.add(new Metric("content.proton.transactionlog.entries.average")); metrics.add(new Metric("content.proton.transactionlog.disk_usage.average")); 
metrics.add(new Metric("content.proton.transactionlog.replay_time.last")); // document store metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average")); metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average")); metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average")); metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average")); metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average")); metrics.add(new 
Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average")); metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average")); metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average")); // document store cache metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.memory_usage.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.hit_rate.average")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.lookups.rate")); metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.invalidations.rate")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.memory_usage.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.hit_rate.average")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.lookups.rate")); metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.invalidations.rate")); // attribute metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average")); metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average")); metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average")); metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average")); metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average")); // 
index metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average")); metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average")); metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average")); metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average")); // matching metrics.add(new Metric("content.proton.documentdb.matching.queries.rate")); metrics.add(new Metric("content.proton.documentdb.matching.soft_doomed_queries.rate")); metrics.add(new Metric("content.proton.documentdb.matching.query_latency.max")); metrics.add(new Metric("content.proton.documentdb.matching.query_latency.sum")); metrics.add(new Metric("content.proton.documentdb.matching.query_latency.count")); metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.max")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.sum")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.count")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.max")); metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.sum")); metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.count")); metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate")); // TODO: Consider remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.max")); metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.sum")); metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.count")); 
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doomed_queries.rate")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.min")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.max")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.sum")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.count")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.max")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.sum")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.count")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.max")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.sum")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.count")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average")); // TODO: Remove in Vespa 8 metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.max")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.sum")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.count")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.max")); metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.sum")); 
// --- Tail of the preceding (search node) metric-set method; its opening is outside this view. ---
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.count"));
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate")); // TODO: Consider remove in Vespa 8
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.max"));
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.sum"));
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.count"));
        metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.limited_queries.rate"));
        return metrics;
    }

    /**
     * Returns the metrics reported by content/storage nodes: the storage service
     * (vds.datastored / vds.visitor / vds.filestor) and the distributor (vds.idealstate /
     * vds.distributor). The LinkedHashSet preserves insertion order, so metrics are
     * emitted in the order they are listed here.
     */
    private static Set<Metric> getStorageMetrics() {
        Set<Metric> metrics = new LinkedHashSet<>();

        // TODO: For the purpose of this file and likely elsewhere, all but the last aggregate specifier,
        // TODO: such as 'average' and 'sum' in the metric names below are just confusing and can be mentally
        // TODO: disregarded when considering metric names. Consider cleaning up for Vespa 8.

        // Document/byte counts across all disks.
        metrics.add(new Metric("vds.datastored.alldisks.docs.average"));
        metrics.add(new Metric("vds.datastored.alldisks.bytes.average"));

        // Visitor lifetime and queue-wait aggregates.
        metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.max"));
        metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.sum"));
        metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.count"));
        metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.max"));
        metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.sum"));
        metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.count"));
        metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8

        // Filestor operation rates.
        // NOTE(review): several of the *.count.rate metrics below are added a second time further down,
        // next to their latency counterparts. That is harmless only if Metric equality is name-based
        // (LinkedHashSet would then deduplicate) — verify before removing either occurrence.
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.count.rate"));

        // Filestor queue depth and queue-wait aggregates.
        metrics.add(new Metric("vds.filestor.alldisks.queuesize.max"));
        metrics.add(new Metric("vds.filestor.alldisks.queuesize.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.queuesize.count"));
        metrics.add(new Metric("vds.filestor.alldisks.queuesize.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.max"));
        metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.count"));
        metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8

        // Merge read/write latencies.
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.count"));

        // Visitor queue depth and lifecycle counters.
        metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.max"));
        metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.sum"));
        metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.count"));
        metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.visitor.allthreads.completed.sum.rate"));
        metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate"));
        metrics.add(new Metric("vds.visitor.allthreads.failed.sum.rate"));
        metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.max"));
        metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.sum"));
        metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.count"));
        metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.max"));
        metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.sum"));
        metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.count"));
        metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.average")); // TODO: Remove in Vespa 8

        // Per-operation rate/failure/latency metrics.
        // NOTE(review): the *.count.rate adds below duplicate the block above — see note there.
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.test_and_set_failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.test_and_set_failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.test_and_set_failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.average")); // TODO: Remove in Vespa 8

        // Bucket maintenance operations.
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.failed.rate"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.max"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.sum"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.count"));
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));

        // Distributor: ideal-state convergence metrics.
        metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
        metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
        metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
        metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
        metrics.add(new Metric("vds.idealstate.buckets.average"));
        metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
        metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate"));
        metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate"));
        metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average"));
        metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate"));
        metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate"));
        metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average"));
        metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate"));
        metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate"));
        metrics.add(new Metric("vds.idealstate.split_bucket.pending.average"));
        metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate"));
        metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate"));
        metrics.add(new Metric("vds.idealstate.join_bucket.pending.average"));
        metrics.add(new Metric("vds.idealstate.garbage_collection.done_ok.rate"));
        metrics.add(new Metric("vds.idealstate.garbage_collection.done_failed.rate"));
        metrics.add(new Metric("vds.idealstate.garbage_collection.pending.average"));
        metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.count"));
        metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.rate"));

        // Distributor: per-operation latency, success and failure metrics.
        metrics.add(new Metric("vds.distributor.puts.sum.latency.max"));
        metrics.add(new Metric("vds.distributor.puts.sum.latency.sum"));
        metrics.add(new Metric("vds.distributor.puts.sum.latency.count"));
        metrics.add(new Metric("vds.distributor.puts.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
        metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
        metrics.add(new Metric("vds.distributor.puts.sum.failures.notfound.rate"));
        metrics.add(new Metric("vds.distributor.puts.sum.failures.test_and_set_failed.rate"));
        metrics.add(new Metric("vds.distributor.puts.sum.failures.concurrent_mutations.rate"));
        metrics.add(new Metric("vds.distributor.removes.sum.latency.max"));
        metrics.add(new Metric("vds.distributor.removes.sum.latency.sum"));
        metrics.add(new Metric("vds.distributor.removes.sum.latency.count"));
        metrics.add(new Metric("vds.distributor.removes.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
        metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
        metrics.add(new Metric("vds.distributor.removes.sum.failures.notfound.rate"));
        metrics.add(new Metric("vds.distributor.removes.sum.failures.test_and_set_failed.rate"));
        metrics.add(new Metric("vds.distributor.removes.sum.failures.concurrent_mutations.rate"));
        metrics.add(new Metric("vds.distributor.updates.sum.latency.max"));
        metrics.add(new Metric("vds.distributor.updates.sum.latency.sum"));
        metrics.add(new Metric("vds.distributor.updates.sum.latency.count"));
        metrics.add(new Metric("vds.distributor.updates.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
        metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
        metrics.add(new Metric("vds.distributor.updates.sum.failures.notfound.rate"));
        metrics.add(new Metric("vds.distributor.updates.sum.failures.test_and_set_failed.rate"));
        metrics.add(new Metric("vds.distributor.updates.sum.failures.concurrent_mutations.rate"));
        metrics.add(new Metric("vds.distributor.updates.sum.diverging_timestamp_updates.rate"));
        metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
        metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
        metrics.add(new Metric("vds.distributor.gets.sum.latency.max"));
        metrics.add(new Metric("vds.distributor.gets.sum.latency.sum"));
        metrics.add(new Metric("vds.distributor.gets.sum.latency.count"));
        metrics.add(new Metric("vds.distributor.gets.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
        metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
        metrics.add(new Metric("vds.distributor.gets.sum.failures.notfound.rate"));
        metrics.add(new Metric("vds.distributor.visitor.sum.latency.max"));
        metrics.add(new Metric("vds.distributor.visitor.sum.latency.sum"));
        metrics.add(new Metric("vds.distributor.visitor.sum.latency.count"));
        metrics.add(new Metric("vds.distributor.visitor.sum.latency.average")); // TODO: Remove in Vespa 8
        metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
        metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));

        // Distributor: stored document/byte totals, plus clock-skew aborts from the bouncer.
        metrics.add(new Metric("vds.distributor.docsstored.average"));
        metrics.add(new Metric("vds.distributor.bytesstored.average"));
        metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));

        return metrics;
    }

    /**
     * Adds one metric per aggregate suffix, named {@code metricName + "." + suffix}.
     * NOTE(review): "aggregateSuffices" is a misspelling of "aggregateSuffixes"; the
     * method is private, so renaming the parameter would be safe cleanup.
     */
    private static void addMetric(Set<Metric> metrics, String metricName, List<String> aggregateSuffices) {
        for (String suffix : aggregateSuffices) {
            metrics.add(new Metric(metricName + "." + suffix));
        }
    }

}
package com.yahoo.vespa.config.server.deploy; import com.yahoo.cloud.config.ConfigserverConfig; import com.yahoo.component.Version; import com.yahoo.config.application.api.ApplicationPackage; import com.yahoo.config.application.api.DeployLogger; import com.yahoo.config.application.api.FileRegistry; import com.yahoo.config.model.api.ApplicationRoles; import com.yahoo.config.model.api.ConfigDefinitionRepo; import com.yahoo.config.model.api.ConfigServerSpec; import com.yahoo.config.model.api.ContainerEndpoint; import com.yahoo.config.model.api.EndpointCertificateSecrets; import com.yahoo.config.model.api.HostProvisioner; import com.yahoo.config.model.api.Model; import com.yahoo.config.model.api.ModelContext; import com.yahoo.config.model.api.Provisioned; import com.yahoo.config.model.api.Quota; import com.yahoo.config.model.api.Reindexing; import com.yahoo.config.model.api.TenantSecretStore; import com.yahoo.config.provision.ApplicationId; import com.yahoo.config.provision.AthenzDomain; import com.yahoo.config.provision.ClusterSpec; import com.yahoo.config.provision.DockerImage; import com.yahoo.config.provision.HostName; import com.yahoo.config.provision.NodeResources; import com.yahoo.config.provision.TenantName; import com.yahoo.config.provision.Zone; import com.yahoo.container.jdisc.secretstore.SecretStore; import com.yahoo.vespa.config.server.tenant.SecretStoreExternalIdRetriever; import com.yahoo.vespa.flags.FetchVector; import com.yahoo.vespa.flags.FlagSource; import com.yahoo.vespa.flags.Flags; import com.yahoo.vespa.flags.PermanentFlags; import com.yahoo.vespa.flags.StringFlag; import com.yahoo.vespa.flags.UnboundFlag; import java.io.File; import java.net.URI; import java.security.cert.X509Certificate; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.function.ToIntFunction; import static com.yahoo.vespa.config.server.ConfigServerSpec.fromConfig; import static com.yahoo.vespa.flags.FetchVector.Dimension.CLUSTER_TYPE; /** 
* Implementation of {@link ModelContext} for configserver. * * @author Ulf Lilleengen */ public class ModelContextImpl implements ModelContext { private final ApplicationPackage applicationPackage; private final Optional<Model> previousModel; private final Optional<ApplicationPackage> permanentApplicationPackage; private final DeployLogger deployLogger; private final ConfigDefinitionRepo configDefinitionRepo; private final FileRegistry fileRegistry; private final HostProvisioner hostProvisioner; private final Provisioned provisioned; private final Optional<? extends Reindexing> reindexing; private final ModelContext.Properties properties; private final Optional<File> appDir; private final Optional<DockerImage> wantedDockerImageRepository; /** The version of Vespa we are building a model for */ private final Version modelVespaVersion; /** * The Version of Vespa this model should specify that nodes should use. Note that this * is separate from the version of this model, as upgrades are not immediate. * We may build a config model of Vespa version "a" which specifies that nodes should * use Vespa version "b". The "a" model will then be used by nodes who have not yet * upgraded to version "b". */ private final Version wantedNodeVespaVersion; public ModelContextImpl(ApplicationPackage applicationPackage, Optional<Model> previousModel, Optional<ApplicationPackage> permanentApplicationPackage, DeployLogger deployLogger, ConfigDefinitionRepo configDefinitionRepo, FileRegistry fileRegistry, Optional<? 
extends Reindexing> reindexing, HostProvisioner hostProvisioner, Provisioned provisioned, ModelContext.Properties properties, Optional<File> appDir, Optional<DockerImage> wantedDockerImageRepository, Version modelVespaVersion, Version wantedNodeVespaVersion) { this.applicationPackage = applicationPackage; this.previousModel = previousModel; this.permanentApplicationPackage = permanentApplicationPackage; this.deployLogger = deployLogger; this.configDefinitionRepo = configDefinitionRepo; this.fileRegistry = fileRegistry; this.reindexing = reindexing; this.hostProvisioner = hostProvisioner; this.provisioned = provisioned; this.properties = properties; this.appDir = appDir; this.wantedDockerImageRepository = wantedDockerImageRepository; this.modelVespaVersion = modelVespaVersion; this.wantedNodeVespaVersion = wantedNodeVespaVersion; } @Override public ApplicationPackage applicationPackage() { return applicationPackage; } @Override public Optional<Model> previousModel() { return previousModel; } @Override public Optional<ApplicationPackage> permanentApplicationPackage() { return permanentApplicationPackage; } /** * Returns the host provisioner to use, or empty to use the default provisioner, * creating hosts from the application package defined hosts */ @Override public HostProvisioner getHostProvisioner() { return hostProvisioner; } @Override public Provisioned provisioned() { return provisioned; } @Override public DeployLogger deployLogger() { return deployLogger; } @Override public ConfigDefinitionRepo configDefinitionRepo() { return configDefinitionRepo; } @Override public FileRegistry getFileRegistry() { return fileRegistry; } @Override public Optional<? 
extends Reindexing> reindexing() { return reindexing; } @Override public ModelContext.Properties properties() { return properties; } @Override public Optional<File> appDir() { return appDir; } @Override public Optional<DockerImage> wantedDockerImageRepo() { return wantedDockerImageRepository; } @Override public Version modelVespaVersion() { return modelVespaVersion; } @Override public Version wantedNodeVespaVersion() { return wantedNodeVespaVersion; } public static class FeatureFlags implements ModelContext.FeatureFlags { private final NodeResources dedicatedClusterControllerFlavor; private final double defaultTermwiseLimit; private final boolean useThreePhaseUpdates; private final String feedSequencer; private final String responseSequencer; private final int numResponseThreads; private final boolean skipCommunicationManagerThread; private final boolean skipMbusRequestThread; private final boolean skipMbusReplyThread; private final boolean useAsyncMessageHandlingOnSchedule; private final double feedConcurrency; private final boolean useBucketExecutorForPruneRemoved; private final boolean enableFeedBlockInDistributor; private final ToIntFunction<ClusterSpec.Type> metricsProxyMaxHeapSizeInMb; private final List<String> allowedAthenzProxyIdentities; private final boolean tenantIamRole; private final int maxActivationInhibitedOutOfSyncGroups; private final ToIntFunction<ClusterSpec.Type> jvmOmitStackTraceInFastThrow; private final boolean enableCustomAclMapping; private final boolean useExternalRankExpression; private final int numDistributorStripes; public FeatureFlags(FlagSource source, ApplicationId appId) { this.dedicatedClusterControllerFlavor = parseDedicatedClusterControllerFlavor(flagValue(source, appId, Flags.DEDICATED_CLUSTER_CONTROLLER_FLAVOR)); this.defaultTermwiseLimit = flagValue(source, appId, Flags.DEFAULT_TERM_WISE_LIMIT); this.useThreePhaseUpdates = flagValue(source, appId, Flags.USE_THREE_PHASE_UPDATES); this.feedSequencer = flagValue(source, appId, 
Flags.FEED_SEQUENCER_TYPE); this.responseSequencer = flagValue(source, appId, Flags.RESPONSE_SEQUENCER_TYPE); this.numResponseThreads = flagValue(source, appId, Flags.RESPONSE_NUM_THREADS); this.skipCommunicationManagerThread = flagValue(source, appId, Flags.SKIP_COMMUNICATIONMANAGER_THREAD); this.skipMbusRequestThread = flagValue(source, appId, Flags.SKIP_MBUS_REQUEST_THREAD); this.skipMbusReplyThread = flagValue(source, appId, Flags.SKIP_MBUS_REPLY_THREAD); this.useAsyncMessageHandlingOnSchedule = flagValue(source, appId, Flags.USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE); this.feedConcurrency = flagValue(source, appId, Flags.FEED_CONCURRENCY); this.useBucketExecutorForPruneRemoved = flagValue(source, appId, Flags.USE_BUCKET_EXECUTOR_FOR_PRUNE_REMOVED); this.enableFeedBlockInDistributor = flagValue(source, appId, Flags.ENABLE_FEED_BLOCK_IN_DISTRIBUTOR); this.metricsProxyMaxHeapSizeInMb = type -> Flags.METRICS_PROXY_MAX_HEAP_SIZE_IN_MB.bindTo(source).with(CLUSTER_TYPE, type.name()).value(); this.allowedAthenzProxyIdentities = flagValue(source, appId, Flags.ALLOWED_ATHENZ_PROXY_IDENTITIES); this.tenantIamRole = flagValue(source, appId.tenant(), Flags.TENANT_IAM_ROLE); this.maxActivationInhibitedOutOfSyncGroups = flagValue(source, appId, Flags.MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS); this.jvmOmitStackTraceInFastThrow = type -> flagValueAsInt(source, appId, type, PermanentFlags.JVM_OMIT_STACK_TRACE_IN_FAST_THROW); this.enableCustomAclMapping = flagValue(source, appId, Flags.ENABLE_CUSTOM_ACL_MAPPING); this.numDistributorStripes = flagValue(source, appId, Flags.NUM_DISTRIBUTOR_STRIPES); this.useExternalRankExpression = flagValue(source, appId, Flags.USE_EXTERNAL_RANK_EXPRESSION);; } @Override public Optional<NodeResources> dedicatedClusterControllerFlavor() { return Optional.ofNullable(dedicatedClusterControllerFlavor); } @Override public double defaultTermwiseLimit() { return defaultTermwiseLimit; } @Override public boolean useThreePhaseUpdates() { return 
useThreePhaseUpdates; } @Override public String feedSequencerType() { return feedSequencer; } @Override public String responseSequencerType() { return responseSequencer; } @Override public int defaultNumResponseThreads() { return numResponseThreads; } @Override public boolean skipCommunicationManagerThread() { return skipCommunicationManagerThread; } @Override public boolean skipMbusRequestThread() { return skipMbusRequestThread; } @Override public boolean skipMbusReplyThread() { return skipMbusReplyThread; } @Override public boolean useAsyncMessageHandlingOnSchedule() { return useAsyncMessageHandlingOnSchedule; } @Override public double feedConcurrency() { return feedConcurrency; } @Override public boolean useBucketExecutorForPruneRemoved() { return useBucketExecutorForPruneRemoved; } @Override public boolean enableFeedBlockInDistributor() { return enableFeedBlockInDistributor; } @Override public int metricsProxyMaxHeapSizeInMb(ClusterSpec.Type type) { return metricsProxyMaxHeapSizeInMb.applyAsInt(type); } @Override public List<String> allowedAthenzProxyIdentities() { return allowedAthenzProxyIdentities; } @Override public boolean tenantIamRole() { return tenantIamRole; } @Override public int maxActivationInhibitedOutOfSyncGroups() { return maxActivationInhibitedOutOfSyncGroups; } @Override public String jvmOmitStackTraceInFastThrowOption(ClusterSpec.Type type) { return translateJvmOmitStackTraceInFastThrowIntToString(jvmOmitStackTraceInFastThrow, type); } @Override public boolean enableCustomAclMapping() { return enableCustomAclMapping; } @Override public int numDistributorStripes() { return numDistributorStripes; } @Override public boolean useExternalRankExpressions() { return useExternalRankExpression; } private static <V> V flagValue(FlagSource source, ApplicationId appId, UnboundFlag<? 
extends V, ?, ?> flag) { return flag.bindTo(source) .with(FetchVector.Dimension.APPLICATION_ID, appId.serializedForm()) .boxedValue(); } private static <V> V flagValue(FlagSource source, TenantName tenant, UnboundFlag<? extends V, ?, ?> flag) { return flag.bindTo(source) .with(FetchVector.Dimension.TENANT_ID, tenant.value()) .boxedValue(); } private static <V> V flagValue(FlagSource source, ApplicationId appId, ClusterSpec.Type clusterType, UnboundFlag<? extends V, ?, ?> flag) { return flag.bindTo(source) .with(FetchVector.Dimension.APPLICATION_ID, appId.serializedForm()) .with(FetchVector.Dimension.CLUSTER_TYPE, clusterType.name()) .boxedValue(); } static int flagValueAsInt(FlagSource source, ApplicationId appId, ClusterSpec.Type clusterType, UnboundFlag<? extends Boolean, ?, ?> flag) { return flagValue(source, appId, clusterType, flag) ? 1 : 0; } private String translateJvmOmitStackTraceInFastThrowIntToString(ToIntFunction<ClusterSpec.Type> function, ClusterSpec.Type clusterType) { return function.applyAsInt(clusterType) == 1 ? 
"" : "-XX:-OmitStackTraceInFastThrow"; } } public static class Properties implements ModelContext.Properties { private final ModelContext.FeatureFlags featureFlags; private final ApplicationId applicationId; private final boolean multitenant; private final List<ConfigServerSpec> configServerSpecs; private final HostName loadBalancerName; private final URI ztsUrl; private final String athenzDnsSuffix; private final boolean hostedVespa; private final Zone zone; private final Set<ContainerEndpoint> endpoints; private final boolean isBootstrap; private final boolean isFirstTimeDeployment; private final Optional<EndpointCertificateSecrets> endpointCertificateSecrets; private final Optional<AthenzDomain> athenzDomain; private final Optional<ApplicationRoles> applicationRoles; private final Quota quota; private final List<TenantSecretStore> tenantSecretStores; private final SecretStore secretStore; private final StringFlag jvmGCOptionsFlag; private final boolean allowDisableMtls; private final List<X509Certificate> operatorCertificates; public Properties(ApplicationId applicationId, ConfigserverConfig configserverConfig, Zone zone, Set<ContainerEndpoint> endpoints, boolean isBootstrap, boolean isFirstTimeDeployment, FlagSource flagSource, Optional<EndpointCertificateSecrets> endpointCertificateSecrets, Optional<AthenzDomain> athenzDomain, Optional<ApplicationRoles> applicationRoles, Optional<Quota> maybeQuota, List<TenantSecretStore> tenantSecretStores, SecretStore secretStore, List<X509Certificate> operatorCertificates) { this.featureFlags = new FeatureFlags(flagSource, applicationId); this.applicationId = applicationId; this.multitenant = configserverConfig.multitenant() || configserverConfig.hostedVespa() || Boolean.getBoolean("multitenant"); this.configServerSpecs = fromConfig(configserverConfig); this.loadBalancerName = HostName.from(configserverConfig.loadBalancerAddress()); this.ztsUrl = configserverConfig.ztsUrl() != null ? 
URI.create(configserverConfig.ztsUrl()) : null; this.athenzDnsSuffix = configserverConfig.athenzDnsSuffix(); this.hostedVespa = configserverConfig.hostedVespa(); this.zone = zone; this.endpoints = endpoints; this.isBootstrap = isBootstrap; this.isFirstTimeDeployment = isFirstTimeDeployment; this.endpointCertificateSecrets = endpointCertificateSecrets; this.athenzDomain = athenzDomain; this.applicationRoles = applicationRoles; this.quota = maybeQuota.orElseGet(Quota::unlimited); this.tenantSecretStores = tenantSecretStores; this.secretStore = secretStore; this.jvmGCOptionsFlag = PermanentFlags.JVM_GC_OPTIONS.bindTo(flagSource) .with(FetchVector.Dimension.APPLICATION_ID, applicationId.serializedForm()); this.allowDisableMtls = PermanentFlags.ALLOW_DISABLE_MTLS.bindTo(flagSource) .with(FetchVector.Dimension.APPLICATION_ID, applicationId.serializedForm()).value(); this.operatorCertificates = operatorCertificates; } @Override public ModelContext.FeatureFlags featureFlags() { return featureFlags; } @Override public boolean multitenant() { return multitenant; } @Override public ApplicationId applicationId() { return applicationId; } @Override public List<ConfigServerSpec> configServerSpecs() { return configServerSpecs; } @Override public HostName loadBalancerName() { return loadBalancerName; } @Override public URI ztsUrl() { return ztsUrl; } @Override public String athenzDnsSuffix() { return athenzDnsSuffix; } @Override public boolean hostedVespa() { return hostedVespa; } @Override public Zone zone() { return zone; } @Override public Set<ContainerEndpoint> endpoints() { return endpoints; } @Override public boolean isBootstrap() { return isBootstrap; } @Override public boolean isFirstTimeDeployment() { return isFirstTimeDeployment; } @Override public Optional<EndpointCertificateSecrets> endpointCertificateSecrets() { return endpointCertificateSecrets; } @Override public Optional<AthenzDomain> athenzDomain() { return athenzDomain; } @Override public 
Optional<ApplicationRoles> applicationRoles() { return applicationRoles; } @Override public Quota quota() { return quota; } @Override public List<TenantSecretStore> tenantSecretStores() { return SecretStoreExternalIdRetriever.populateExternalId(secretStore, applicationId.tenant(), zone.system(), tenantSecretStores); } @Override public String jvmGCOptions(Optional<ClusterSpec.Type> clusterType) { return flagValueForClusterType(jvmGCOptionsFlag, clusterType); } @Override public boolean allowDisableMtls() { return allowDisableMtls; } @Override public List<X509Certificate> operatorCertificates() { return operatorCertificates; } public String flagValueForClusterType(StringFlag flag, Optional<ClusterSpec.Type> clusterType) { return clusterType.map(type -> flag.with(CLUSTER_TYPE, type.name())) .orElse(flag) .value(); } } private static NodeResources parseDedicatedClusterControllerFlavor(String flagValue) { String[] parts = flagValue.split("-"); if (parts.length != 3) return null; return new NodeResources(Double.parseDouble(parts[0]), Double.parseDouble(parts[1]), Double.parseDouble(parts[2]), 0.1, NodeResources.DiskSpeed.any, NodeResources.StorageType.any); } }
package com.sequenceiq.cloudbreak.service.stack.flow;

import static com.sequenceiq.cloudbreak.domain.Status.AVAILABLE;
import static com.sequenceiq.cloudbreak.domain.Status.DELETE_FAILED;
import static com.sequenceiq.cloudbreak.domain.Status.STOPPED;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import javax.inject.Inject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.sequenceiq.cloudbreak.cloud.connector.CloudConnectorException;
import com.sequenceiq.cloudbreak.controller.NotFoundException;
import com.sequenceiq.cloudbreak.domain.CloudPlatform;
import com.sequenceiq.cloudbreak.domain.Cluster;
import com.sequenceiq.cloudbreak.domain.HostMetadata;
import com.sequenceiq.cloudbreak.domain.HostMetadataState;
import com.sequenceiq.cloudbreak.domain.InstanceGroup;
import com.sequenceiq.cloudbreak.domain.InstanceMetaData;
import com.sequenceiq.cloudbreak.domain.InstanceStatus;
import com.sequenceiq.cloudbreak.domain.Resource;
import com.sequenceiq.cloudbreak.domain.ResourceType;
import com.sequenceiq.cloudbreak.domain.Stack;
import com.sequenceiq.cloudbreak.repository.HostMetadataRepository;
import com.sequenceiq.cloudbreak.repository.InstanceGroupRepository;
import com.sequenceiq.cloudbreak.repository.InstanceMetaDataRepository;
import com.sequenceiq.cloudbreak.repository.ResourceRepository;
import com.sequenceiq.cloudbreak.repository.StackUpdater;
import com.sequenceiq.cloudbreak.service.cluster.flow.AmbariClusterConnector;
import com.sequenceiq.cloudbreak.service.events.CloudbreakEventService;
import com.sequenceiq.cloudbreak.service.stack.StackService;
import com.sequenceiq.cloudbreak.service.stack.connector.MetadataSetup;

/**
 * Reconciles Cloudbreak's stored instance metadata with the instance states
 * reported by the cloud provider. For each instance it asks the platform-specific
 * {@link MetadataSetup} for the current state, updates the local metadata/resources
 * to match, fires user-visible Cloudbreak events, and finally derives a new
 * overall stack status from the per-state instance counts.
 */
@Service
public class StackSyncService {

    private static final Logger LOGGER = LoggerFactory.getLogger(StackSyncService.class);

    // Status reason written to the stack whenever the sync changes the stack status.
    private static final String SYNC_STATUS_REASON = "Synced instance states with the cloud provider.";

    @Inject
    private StackService stackService;
    @Inject
    private StackUpdater stackUpdater;
    @Inject
    private CloudbreakEventService eventService;
    @Inject
    private InstanceMetaDataRepository instanceMetaDataRepository;
    @Inject
    private InstanceGroupRepository instanceGroupRepository;
    @Inject
    private HostMetadataRepository hostMetadataRepository;
    @Inject
    private ResourceRepository resourceRepository;
    @Inject
    private AmbariClusterConnector ambariClusterConnector;
    // Map from cloud platform to its metadata connector, injected by Spring by type.
    @javax.annotation.Resource
    private Map<CloudPlatform, MetadataSetup> metadataSetups;

    /**
     * Entry point: loads the stack and runs a sync unless the stack is being
     * deleted or is currently being modified (syncing mid-operation would race
     * with the ongoing flow).
     */
    public void sync(Long stackId) {
        Stack stack = stackService.getById(stackId);
        if (stack.isStackInDeletionPhase() || stack.isModificationInProgress()) {
            LOGGER.warn("Stack could not be synchronized in {} state!", stack.getStatus());
        } else {
            sync(stack);
        }
    }

    /**
     * Queries the provider state of every instance in the stack, dispatches to the
     * per-state handler, and counts instances per {@link InstanceSyncState}.
     * Provider query failures are counted as UNKNOWN and reported as events
     * rather than aborting the whole sync.
     */
    private void sync(Stack stack) {
        Long stackId = stack.getId();
        Set<InstanceMetaData> instances = instanceMetaDataRepository.findAllInStack(stackId);
        Map<InstanceSyncState, Integer> instanceStateCounts = initInstanceStateCounts();
        for (InstanceMetaData instance : instances) {
            InstanceGroup instanceGroup = instance.getInstanceGroup();
            try {
                MetadataSetup metadataSetup = metadataSetups.get(stack.cloudPlatform());
                InstanceSyncState state = metadataSetup.getState(stack, instanceGroup, instance.getInstanceId());
                ResourceType instanceResourceType = metadataSetup.getInstanceResourceType();
                if (InstanceSyncState.DELETED.equals(state)) {
                    syncDeletedInstance(stack, stackId, instanceStateCounts, instance, instanceGroup, instanceResourceType);
                } else if (InstanceSyncState.RUNNING.equals(state)) {
                    syncRunningInstance(stack, stackId, instanceStateCounts, instance, instanceGroup);
                } else if (InstanceSyncState.STOPPED.equals(state)) {
                    syncStoppedInstance(stack, stackId, instanceStateCounts, instance, instanceGroup, instanceResourceType);
                } else {
                    // Any other provider state is treated as an in-progress operation.
                    instanceStateCounts.put(InstanceSyncState.IN_PROGRESS, instanceStateCounts.get(InstanceSyncState.IN_PROGRESS) + 1);
                }
            } catch (CloudConnectorException e) {
                // Provider call failed for this instance: report it and keep syncing the rest.
                LOGGER.warn(e.getMessage(), e);
                eventService.fireCloudbreakEvent(stackId, AVAILABLE.name(),
                        String.format("Couldn't retrieve status of instance '%s' from cloud provider.", instance.getInstanceId()));
                instanceStateCounts.put(InstanceSyncState.UNKNOWN, instanceStateCounts.get(InstanceSyncState.UNKNOWN) + 1);
            }
        }
        handleSyncResult(stack, instanceStateCounts);
    }

    /**
     * Handles an instance the provider reports as STOPPED.
     * NOTE(review): the log message says "setting its state to STOPPED", but the
     * code below deletes the tracked resource and calls
     * updateMetaDataToTerminated(), i.e. it marks the instance TERMINATED —
     * confirm whether the message or the behavior is the intended one.
     */
    private void syncStoppedInstance(Stack stack, Long stackId, Map<InstanceSyncState, Integer> instanceStateCounts, InstanceMetaData instance,
            InstanceGroup instanceGroup, ResourceType instanceResourceType) {
        instanceStateCounts.put(InstanceSyncState.STOPPED, instanceStateCounts.get(InstanceSyncState.STOPPED) + 1);
        if (!instance.isTerminated() && !stack.isStopped()) {
            LOGGER.info("Instance '{}' is reported as stopped on the cloud provider, setting its state to STOPPED.", instance.getInstanceId());
            deleteResourceIfNeeded(stackId, instance, instanceResourceType);
            updateMetaDataToTerminated(stackId, instance, instanceGroup);
        }
    }

    /**
     * Handles an instance the provider reports as RUNNING: if the local metadata
     * does not already reflect a running (or deliberately decommissioned)
     * instance, recreate its resource record and mark it running.
     */
    private void syncRunningInstance(Stack stack, Long stackId, Map<InstanceSyncState, Integer> instanceStateCounts, InstanceMetaData instance,
            InstanceGroup instanceGroup) {
        instanceStateCounts.put(InstanceSyncState.RUNNING, instanceStateCounts.get(InstanceSyncState.RUNNING) + 1);
        if (!instance.isRunning() && !instance.isDecommissioned()) {
            LOGGER.info("Instance '{}' is reported as running on the cloud provider, updating metadata.", instance.getInstanceId());
            createResourceIfNeeded(stack, instance, instanceGroup);
            updateMetaDataToRunning(stackId, stack.getCluster(), instance, instanceGroup);
        }
    }

    /**
     * Handles an instance the provider reports as DELETED: removes its host from
     * the Ambari cluster (best effort), then marks the instance TERMINATED locally.
     */
    private void syncDeletedInstance(Stack stack, Long stackId, Map<InstanceSyncState, Integer> instanceStateCounts, InstanceMetaData instance,
            InstanceGroup instanceGroup, ResourceType instanceResourceType) {
        instanceStateCounts.put(InstanceSyncState.DELETED, instanceStateCounts.get(InstanceSyncState.DELETED) + 1);
        deleteHostFromCluster(stack, instance);
        if (!instance.isTerminated()) {
            LOGGER.info("Instance '{}' is reported as deleted on the cloud provider, setting its state to TERMINATED.", instance.getInstanceId());
            deleteResourceIfNeeded(stackId, instance, instanceResourceType);
            updateMetaDataToTerminated(stackId, instance, instanceGroup);
        }
    }

    /** Persists a resource record for the instance, if the platform tracks instance resources. */
    private void createResourceIfNeeded(Stack stack, InstanceMetaData instance, InstanceGroup instanceGroup) {
        ResourceType resourceType = metadataSetups.get(stack.cloudPlatform()).getInstanceResourceType();
        if (resourceType != null) {
            Resource resource = new Resource(resourceType, instance.getInstanceId(), stack, instanceGroup.getGroupName());
            resourceRepository.save(resource);
        }
    }

    /** Deletes the instance's resource record, if one exists. */
    private void deleteResourceIfNeeded(Long stackId, InstanceMetaData instance, ResourceType instanceResourceType) {
        Resource resource = resourceRepository.findByStackIdAndNameAndType(stackId, instance.getInstanceId(), instanceResourceType);
        if (resource != null) {
            resourceRepository.delete(resource);
        }
    }

    /**
     * Derives the post-sync stack status from the per-state counts. The order of
     * the checks is significant: UNKNOWN and IN_PROGRESS only produce advisory
     * events; a mixed RUNNING/STOPPED stack also only gets an event; otherwise
     * the stack status is updated to AVAILABLE, STOPPED or — when no instance is
     * running or stopped — DELETE_FAILED.
     */
    private void handleSyncResult(Stack stack, Map<InstanceSyncState, Integer> instanceStateCounts) {
        if (instanceStateCounts.get(InstanceSyncState.UNKNOWN) > 0) {
            eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(),
                    "The state of one or more instances couldn't be determined. Try syncing later.");
        } else if (instanceStateCounts.get(InstanceSyncState.IN_PROGRESS) > 0) {
            eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(),
                    "An operation on one or more instances is in progress. Try syncing later.");
        } else if (instanceStateCounts.get(InstanceSyncState.RUNNING) > 0 && instanceStateCounts.get(InstanceSyncState.STOPPED) > 0) {
            eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(),
                    "Some instances were stopped on the cloud provider. Restart or terminate them and try syncing later.");
        } else if (instanceStateCounts.get(InstanceSyncState.RUNNING) > 0) {
            stackUpdater.updateStackStatus(stack.getId(), AVAILABLE, SYNC_STATUS_REASON);
        } else if (instanceStateCounts.get(InstanceSyncState.STOPPED) > 0) {
            stackUpdater.updateStackStatus(stack.getId(), STOPPED, SYNC_STATUS_REASON);
        } else {
            stackUpdater.updateStackStatus(stack.getId(), DELETE_FAILED, SYNC_STATUS_REASON);
        }
    }

    /** Returns a count map with every {@link InstanceSyncState} initialized to zero. */
    private Map<InstanceSyncState, Integer> initInstanceStateCounts() {
        Map<InstanceSyncState, Integer> instanceStates = new HashMap<>();
        instanceStates.put(InstanceSyncState.DELETED, 0);
        instanceStates.put(InstanceSyncState.STOPPED, 0);
        instanceStates.put(InstanceSyncState.RUNNING, 0);
        instanceStates.put(InstanceSyncState.IN_PROGRESS, 0);
        instanceStates.put(InstanceSyncState.UNKNOWN, 0);
        return instanceStates;
    }

    /**
     * Best-effort removal of the instance's host from the Ambari cluster.
     * If Ambari is reachable and agrees to drop the host, the host metadata is
     * deleted; if Ambari still reports the host healthy, only an event is fired.
     * If Ambari is unreachable, the host is flagged UNHEALTHY instead. Any
     * exception is logged and reported as an event — the sync continues.
     */
    private void deleteHostFromCluster(Stack stack, InstanceMetaData instanceMetaData) {
        try {
            if (stack.getCluster() != null) {
                HostMetadata hostMetadata = hostMetadataRepository.findHostsInClusterByName(stack.getCluster().getId(),
                        instanceMetaData.getDiscoveryFQDN());
                if (hostMetadata == null) {
                    throw new NotFoundException(String.format("Host not found with id '%s'", instanceMetaData.getDiscoveryFQDN()));
                }
                if (ambariClusterConnector.isAmbariAvailable(stack)) {
                    if (ambariClusterConnector.deleteHostFromAmbari(stack, hostMetadata)) {
                        hostMetadataRepository.delete(hostMetadata.getId());
                        eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(),
                                String.format("Deleted host '%s' from Ambari because it is marked as terminated by the cloud provider.",
                                        instanceMetaData.getDiscoveryFQDN()));
                    } else {
                        eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(), String.format(
                                "Instance '%s' is terminated but couldn't remove host from Ambari because it still reports the host as healthy."
                                        + " Try syncing later.", instanceMetaData.getDiscoveryFQDN()));
                    }
                } else {
                    hostMetadata.setHostMetadataState(HostMetadataState.UNHEALTHY);
                    hostMetadataRepository.save(hostMetadata);
                    eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(),
                            String.format("Host (%s) state has been updated to: %s", instanceMetaData.getDiscoveryFQDN(),
                                    HostMetadataState.UNHEALTHY.name()));
                }
            }
        } catch (Exception e) {
            LOGGER.error("Host cannot be deleted from cluster: ", e);
            eventService.fireCloudbreakEvent(stack.getId(), AVAILABLE.name(),
                    String.format("Instance '%s' is marked as terminated by the cloud provider, but couldn't delete the host from Ambari.",
                            instanceMetaData.getDiscoveryFQDN()));
        }
    }

    /**
     * Marks the instance TERMINATED, decrements its group's node count, persists
     * both, and notifies the user.
     */
    private void updateMetaDataToTerminated(Long stackId, InstanceMetaData instanceMetaData, InstanceGroup instanceGroup) {
        instanceGroup.setNodeCount(instanceGroup.getNodeCount() - 1);
        instanceMetaData.setInstanceStatus(InstanceStatus.TERMINATED);
        instanceMetaDataRepository.save(instanceMetaData);
        instanceGroupRepository.save(instanceGroup);
        eventService.fireCloudbreakEvent(stackId, AVAILABLE.name(),
                String.format("Deleted instance '%s' from Cloudbreak metadata because it couldn't be found on the cloud provider.",
                        instanceMetaData.getDiscoveryFQDN()));
    }

    /**
     * Marks the instance running: REGISTERED when its host is already known to
     * the cluster metadata, UNREGISTERED otherwise. Increments the group's node
     * count, persists both, and notifies the user.
     */
    private void updateMetaDataToRunning(Long stackId, Cluster cluster, InstanceMetaData instanceMetaData, InstanceGroup instanceGroup) {
        instanceGroup.setNodeCount(instanceGroup.getNodeCount() + 1);
        HostMetadata hostMetadata = hostMetadataRepository.findHostsInClusterByName(cluster.getId(), instanceMetaData.getDiscoveryFQDN());
        if (hostMetadata != null) {
            LOGGER.info("Instance '{}' was found in the cluster metadata, setting it's state to REGISTERED.", instanceMetaData.getInstanceId());
            instanceMetaData.setInstanceStatus(InstanceStatus.REGISTERED);
        } else {
            LOGGER.info("Instance '{}' was not found in the cluster metadata, setting it's state to UNREGISTERED.", instanceMetaData.getInstanceId());
            instanceMetaData.setInstanceStatus(InstanceStatus.UNREGISTERED);
        }
        instanceMetaDataRepository.save(instanceMetaData);
        instanceGroupRepository.save(instanceGroup);
        eventService.fireCloudbreakEvent(stackId, AVAILABLE.name(),
                String.format("Updated metadata of instance '%s' to running because the cloud provider reported it as running.",
                        instanceMetaData.getInstanceId()));
    }
}
package org.bouncycastle.pqc.crypto.mceliece; import java.security.SecureRandom; import org.bouncycastle.crypto.CipherParameters; import org.bouncycastle.crypto.CryptoServicesRegistrar; import org.bouncycastle.crypto.Digest; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.digests.SHA1Digest; import org.bouncycastle.crypto.params.ParametersWithRandom; import org.bouncycastle.crypto.prng.DigestRandomGenerator; import org.bouncycastle.pqc.crypto.MessageEncryptor; import org.bouncycastle.pqc.math.linearalgebra.ByteUtils; import org.bouncycastle.pqc.math.linearalgebra.GF2Vector; import org.bouncycastle.pqc.math.linearalgebra.IntegerFunctions; /** * This class implements the Kobara/Imai conversion of the McEliecePKCS. This is * a conversion of the McEliecePKCS which is CCA2-secure. For details, see D. * Engelbert, R. Overbeck, A. Schmidt, "A summary of the development of the * McEliece Cryptosystem", technical report. */ public class McElieceKobaraImaiCipher implements MessageEncryptor { /** * The OID of the algorithm. */ public static final String OID = "1.3.6.1.4.1.8301.3.1.3.4.2.3"; private static final String DEFAULT_PRNG_NAME = "SHA1PRNG"; /** * A predetermined public constant. 
*/ public static final byte[] PUBLIC_CONSTANT = "a predetermined public constant" .getBytes(); private Digest messDigest; private SecureRandom sr; McElieceCCA2KeyParameters key; /** * The McEliece main parameters */ private int n, k, t; private boolean forEncryption; public void init(boolean forEncryption, CipherParameters param) { this.forEncryption = forEncryption; if (forEncryption) { if (param instanceof ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; this.sr = rParam.getRandom(); this.key = (McElieceCCA2PublicKeyParameters)rParam.getParameters(); this.initCipherEncrypt((McElieceCCA2PublicKeyParameters)key); } else { this.sr = CryptoServicesRegistrar.getSecureRandom(); this.key = (McElieceCCA2PublicKeyParameters)param; this.initCipherEncrypt((McElieceCCA2PublicKeyParameters)key); } } else { this.key = (McElieceCCA2PrivateKeyParameters)param; this.initCipherDecrypt((McElieceCCA2PrivateKeyParameters)key); } } /** * Return the key size of the given key object. 
* * @param key the McElieceCCA2KeyParameters object * @return the key size of the given key object */ public int getKeySize(McElieceCCA2KeyParameters key) { if (key instanceof McElieceCCA2PublicKeyParameters) { return ((McElieceCCA2PublicKeyParameters)key).getN(); } if (key instanceof McElieceCCA2PrivateKeyParameters) { return ((McElieceCCA2PrivateKeyParameters)key).getN(); } throw new IllegalArgumentException("unsupported type"); } private void initCipherEncrypt(McElieceCCA2PublicKeyParameters pubKey) { this.messDigest = Utils.getDigest(pubKey.getDigest()); n = pubKey.getN(); k = pubKey.getK(); t = pubKey.getT(); } private void initCipherDecrypt(McElieceCCA2PrivateKeyParameters privKey) { this.messDigest = Utils.getDigest(privKey.getDigest()); n = privKey.getN(); k = privKey.getK(); t = privKey.getT(); } public byte[] messageEncrypt(byte[] input) { if (!forEncryption) { throw new IllegalStateException("cipher initialised for decryption"); } int c2Len = messDigest.getDigestSize(); int c4Len = k >> 3; int c5Len = (IntegerFunctions.binomial(n, t).bitLength() - 1) >> 3; int mLen = c4Len + c5Len - c2Len - PUBLIC_CONSTANT.length; if (input.length > mLen) { mLen = input.length; } int c1Len = mLen + PUBLIC_CONSTANT.length; int c6Len = c1Len + c2Len - c4Len - c5Len; // compute (m||const) byte[] mConst = new byte[c1Len]; System.arraycopy(input, 0, mConst, 0, input.length); System.arraycopy(PUBLIC_CONSTANT, 0, mConst, mLen, PUBLIC_CONSTANT.length); // generate random r of length c2Len bytes byte[] r = new byte[c2Len]; sr.nextBytes(r); // get PRNG object // get PRNG object DigestRandomGenerator sr0 = new DigestRandomGenerator(new SHA1Digest()); // seed PRNG with r' sr0.addSeedMaterial(r); // generate random sequence ... byte[] c1 = new byte[c1Len]; sr0.nextBytes(c1); // ... and XOR with (m||const) to obtain c1 for (int i = c1Len - 1; i >= 0; i { c1[i] ^= mConst[i]; } // compute H(c1) ... 
byte[] c2 = new byte[messDigest.getDigestSize()]; messDigest.update(c1, 0, c1.length); messDigest.doFinal(c2, 0); // ... and XOR with r for (int i = c2Len - 1; i >= 0; i { c2[i] ^= r[i]; } // compute (c2||c1) byte[] c2c1 = ByteUtils.concatenate(c2, c1); // split (c2||c1) into (c6||c5||c4), where c4Len is k/8 bytes, c5Len is // floor[log(n|t)]/8 bytes, and c6Len is c1Len+c2Len-c4Len-c5Len (may be byte[] c6 = new byte[0]; if (c6Len > 0) { c6 = new byte[c6Len]; System.arraycopy(c2c1, 0, c6, 0, c6Len); } byte[] c5 = new byte[c5Len]; System.arraycopy(c2c1, c6Len, c5, 0, c5Len); byte[] c4 = new byte[c4Len]; System.arraycopy(c2c1, c6Len + c5Len, c4, 0, c4Len); // convert c4 to vector over GF(2) GF2Vector c4Vec = GF2Vector.OS2VP(k, c4); // convert c5 to error vector z GF2Vector z = Conversions.encode(n, t, c5); // compute encC4 = E(c4, z) byte[] encC4 = McElieceCCA2Primitives.encryptionPrimitive((McElieceCCA2PublicKeyParameters)key, c4Vec, z).getEncoded(); // if c6Len > 0 if (c6Len > 0) { // return (c6||encC4) return ByteUtils.concatenate(c6, encC4); } // else, return encC4 return encC4; } public byte[] messageDecrypt(byte[] input) throws InvalidCipherTextException { if (forEncryption) { throw new IllegalStateException("cipher initialised for decryption"); } int nDiv8 = n >> 3; if (input.length < nDiv8) { throw new InvalidCipherTextException("Bad Padding: Ciphertext too short."); } int c2Len = messDigest.getDigestSize(); int c4Len = k >> 3; int c5Len = (IntegerFunctions.binomial(n, t).bitLength() - 1) >> 3; int c6Len = input.length - nDiv8; // split cipher text (c6||encC4), where c6 may be empty byte[] c6, encC4; if (c6Len > 0) { byte[][] c6EncC4 = ByteUtils.split(input, c6Len); c6 = c6EncC4[0]; encC4 = c6EncC4[1]; } else { c6 = new byte[0]; encC4 = input; } // convert encC4 into vector over GF(2) GF2Vector encC4Vec = GF2Vector.OS2VP(n, encC4); // decrypt encC4Vec to obtain c4 and error vector z GF2Vector[] c4z = 
McElieceCCA2Primitives.decryptionPrimitive((McElieceCCA2PrivateKeyParameters)key, encC4Vec); byte[] c4 = c4z[0].getEncoded(); GF2Vector z = c4z[1]; // if length of c4 is greater than c4Len (because of padding) ... if (c4.length > c4Len) { // ... truncate the padding bytes c4 = ByteUtils.subArray(c4, 0, c4Len); } // compute c5 = Conv^-1(z) byte[] c5 = Conversions.decode(n, t, z); // if c5 is shorter than expected, pad with leading zeroes if (c5.length < c5Len) { byte[] paddedC5 = new byte[c5Len]; System.arraycopy(c5, 0, paddedC5, c5Len - c5.length, c5.length); c5 = paddedC5; } // compute (c6||c5||c4) byte[] c6c5c4 = ByteUtils.concatenate(c6, c5); c6c5c4 = ByteUtils.concatenate(c6c5c4, c4); // split (c6||c5||c4) into (c2||c1), where c2Len = mdLen and c1Len = // input.length-c2Len bytes. int c1Len = c6c5c4.length - c2Len; byte[][] c2c1 = ByteUtils.split(c6c5c4, c2Len); byte[] c2 = c2c1[0]; byte[] c1 = c2c1[1]; // compute H(c1) ... byte[] rPrime = new byte[messDigest.getDigestSize()]; messDigest.update(c1, 0, c1.length); messDigest.doFinal(rPrime, 0); // ... and XOR with c2 to obtain r' for (int i = c2Len - 1; i >= 0; i { rPrime[i] ^= c2[i]; } // get PRNG object DigestRandomGenerator sr0 = new DigestRandomGenerator(new SHA1Digest()); // seed PRNG with r' sr0.addSeedMaterial(rPrime); // generate random sequence R(r') ... byte[] mConstPrime = new byte[c1Len]; sr0.nextBytes(mConstPrime); // ... and XOR with c1 to obtain (m||const') for (int i = c1Len - 1; i >= 0; i { mConstPrime[i] ^= c1[i]; } if (mConstPrime.length < c1Len) { throw new InvalidCipherTextException("Bad Padding: invalid ciphertext"); } byte[][] temp = ByteUtils.split(mConstPrime, c1Len - PUBLIC_CONSTANT.length); byte[] mr = temp[0]; byte[] constPrime = temp[1]; if (!ByteUtils.equals(constPrime, PUBLIC_CONSTANT)) { throw new InvalidCipherTextException("Bad Padding: invalid ciphertext"); } return mr; } }
/* @test
 * @summary unit tests for coroutines
 * @run junit/othervm test.java.dyn.CoroutineTest
 */

package com.oracle.truffle.coro.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.oracle.truffle.coro.AsymCoroutine;
import com.oracle.truffle.coro.Coroutine;
import com.oracle.truffle.coro.CoroutineSupport;

/**
 * Unit tests for symmetric ({@link Coroutine}) and asymmetric
 * ({@link AsymCoroutine}) coroutines. Each test appends letters to {@code seq}
 * from both the main flow and the coroutine bodies and asserts the exact
 * interleaving.
 */
@SuppressWarnings("unused")
public class CoroutineTest {

    // execution-order trace; reset to "a" before every test
    private StringBuilder seq;

    @BeforeClass
    public static void setup() {
        CoroutineSupport support = CoroutineSupport.currentCoroutineSupport();
        System.out.println("Coroutine implementation: " + support.getClass().getName());
    }

    @Before
    public void before() {
        seq = new StringBuilder();
        seq.append("a");
    }

    /** A freshly created coroutine runs to completion on the first yield. */
    @Test
    public void testNewCoroutine() {
        Coroutine coro = new Coroutine() {
            @Override
            protected void run() {
                seq.append("b");
            }
        };
        assertFalse(coro.isFinished());
        Coroutine.yield();
        seq.append("c");
        assertTrue(coro.isFinished());
        assertEquals("abc", seq.toString());
    }

    /** Symmetric ping-pong between the main flow and one coroutine. */
    @Test
    public void symSequence() {
        Coroutine coro = new Coroutine() {
            @Override
            protected void run() {
                seq.append("c");
                for (int i = 0; i < 3; i++) {
                    yield();
                    seq.append("e");
                }
            }
        };
        seq.append("b");
        assertFalse(coro.isFinished());
        Coroutine.yield();
        for (int i = 0; i < 3; i++) {
            seq.append("d");
            assertFalse(coro.isFinished());
            Coroutine.yield();
        }
        seq.append("f");
        assertTrue(coro.isFinished());
        Coroutine.yield();
        seq.append("g");
        assertEquals("abcdededefg", seq.toString());
    }

    /** Ten symmetric coroutines scheduled round-robin. */
    @Test
    public void symMultiSequence() {
        for (int i = 0; i < 10; i++)
            new Coroutine() {
                @Override
                protected void run() {
                    seq.append("c");
                    yield();
                    seq.append("e");
                }
            };
        seq.append("b");
        Coroutine.yield();
        seq.append("d");
        Coroutine.yield();
        seq.append("f");
        Coroutine.yield();
        seq.append("g");
        assertEquals("abccccccccccdeeeeeeeeeefg", seq.toString());
    }

    /** Asymmetric call/ret protocol; calling a finished coroutine throws. */
    @Test
    public void asymSequence() {
        AsymCoroutine<Void, Void> coro = new AsymCoroutine<Void, Void>() {
            @Override
            protected Void run(Void value) {
                seq.append(value + "b");
                Object o = ret();
                seq.append(o + "d");
                return null;
            }
        };
        assertFalse(coro.isFinished());
        coro.call();
        assertFalse(coro.isFinished());
        seq.append("c");
        coro.call();
        seq.append("e");
        assertTrue(coro.isFinished());
        RuntimeException exception = null;
        try {
            coro.call();
        } catch (RuntimeException e) {
            exception = e;
        }
        assertNotNull(exception);
        assertEquals("anullbcnullde", seq.toString());
    }

    /** Five chained asymmetric coroutines, each calling the previously built one. */
    @Test
    public void asymMultiSequence() {
        AsymCoroutine<Void, Void> coro = null;
        // BUGFIX: the extracted source lost the loop decrement ("j final ...");
        // restored "j--) {" — the expected trace b0..b4/c4..c0 requires building
        // the chain from j=4 down to j=0.
        for (int j = 4; j >= 0; j--) {
            final AsymCoroutine<Void, Void> last = coro;
            final int i = j;
            coro = new AsymCoroutine<Void, Void>() {
                @Override
                protected Void run(Void value) {
                    seq.append("b" + i);
                    if (last != null)
                        last.call();
                    seq.append("c" + i);
                    ret();
                    seq.append("e" + i);
                    if (last != null)
                        last.call();
                    seq.append("f" + i);
                    return null;
                }
            };
        }
        seq.append("_");
        assertFalse(coro.isFinished());
        coro.call();
        assertFalse(coro.isFinished());
        seq.append("d");
        coro.call();
        seq.append("g");
        assertTrue(coro.isFinished());
        RuntimeException exception = null;
        try {
            coro.call();
        } catch (RuntimeException e) {
            exception = e;
        }
        assertNotNull(exception);
        assertEquals("a_b0b1b2b3b4c4c3c2c1c0de0e1e2e3e4f4f3f2f1f0g", seq.toString());
    }

    /** Values flow both directions through call()/ret(). */
    @Test
    public void asymReturnValue() {
        AsymCoroutine<Integer, Integer> coro = new AsymCoroutine<Integer, Integer>() {
            @Override
            protected Integer run(Integer value0) {
                int value = value0;
                value = ret(value * 2 + 1);
                value = ret(value * 2 + 2);
                value = ret(value * 2 + 3);
                value = ret(value * 2 + 4);
                value = ret(value * 2 + 5);
                return value * 2 + 6;
            }
        };
        assertFalse(coro.isFinished());
        assertEquals(2001, (int) coro.call(1000));
        assertEquals(4002, (int) coro.call(2000));
        assertEquals(6003, (int) coro.call(3000));
        assertEquals(8004, (int) coro.call(4000));
        assertEquals(10005, (int) coro.call(5000));
        assertEquals(12006, (int) coro.call(6000));
        assertTrue(coro.isFinished());
    }

    /** Locals survive a GC across a yield point. */
    @Test
    public void gcTest1() {
        new Coroutine() {
            @Override
            protected void run() {
                seq.append("c");
                Integer v1 = 1;
                Integer v2 = 14555668;
                yield();
                seq.append("e");
                seq.append("(" + v1 + "," + v2 + ")");
            }
        };
        seq.append("b");
        System.gc();
        Coroutine.yield();
        System.gc();
        seq.append("d");
        Coroutine.yield();
        seq.append("f");
        Coroutine.yield();
        seq.append("g");
        assertEquals("abcde(1,14555668)fg", seq.toString());
    }

    /** An exception inside a coroutine terminates it without killing the scheduler. */
    @Test
    public void exceptionTest1() {
        Coroutine coro = new Coroutine() {
            @Override
            protected void run() {
                seq.append("c");
                long temp = System.nanoTime();
                if (temp != 0)
                    throw new RuntimeException();
                yield();
                seq.append("e");
            }
        };
        seq.append("b");
        assertFalse(coro.isFinished());
        Coroutine.yield();
        seq.append("d");
        Coroutine.yield();
        seq.append("f");
        assertEquals("abcdf", seq.toString());
    }

    /** Many boxed locals across a yield: the large frame is preserved intact. */
    @Test
    public void largeStackframeTest() {
        new Coroutine() {
            @Override
            protected void run() {
                seq.append("c");
                Integer v0 = 10000;
                Integer v1 = 10001;
                Integer v2 = 10002;
                Integer v3 = 10003;
                Integer v4 = 10004;
                Integer v5 = 10005;
                Integer v6 = 10006;
                Integer v7 = 10007;
                Integer v8 = 10008;
                Integer v9 = 10009;
                Integer v10 = 10010;
                Integer v11 = 10011;
                Integer v12 = 10012;
                Integer v13 = 10013;
                Integer v14 = 10014;
                Integer v15 = 10015;
                Integer v16 = 10016;
                Integer v17 = 10017;
                Integer v18 = 10018;
                Integer v19 = 10019;
                yield();
                int sum = v0 + v1 + v2 + v3 + v4 + v5 + v6 + v7 + v8 + v9 + v10 + v11 + v12 + v13 + v14 + v15 + v16 + v17 + v18 + v19;
                seq.append("e" + sum);
            }
        };
        seq.append("b");
        System.gc();
        Coroutine.yield();
        System.gc();
        seq.append("d");
        Coroutine.yield();
        seq.append("f");
        assertEquals("abcde200190f", seq.toString());
    }

    /** JCE digest state survives a yield inside a coroutine with a 64 KiB stack. */
    @Test
    public void shaTest() {
        Coroutine coro = new Coroutine(65536) {
            @Override
            protected void run() {
                try {
                    MessageDigest digest = MessageDigest.getInstance("SHA");
                    digest.update("TestMessage".getBytes());
                    seq.append("b");
                    yield();
                    seq.append(digest.digest()[0]);
                } catch (NoSuchAlgorithmException e) {
                    e.printStackTrace();
                }
            }
        };
        Coroutine.yield();
        seq.append("c");
        assertFalse(coro.isFinished());
        Coroutine.yield();
        assertTrue(coro.isFinished());
        assertEquals("abc72", seq.toString());
    }

    // Disabled (annotation commented out): stress-tests StackOverflowError
    // handling inside small-stack coroutines.
    // @Test
    public void stackoverflowTest() {
        for (int i = 0; i < 10; i++) {
            new Coroutine(65536) {
                int i = 0;

                @Override
                protected void run() {
                    System.out.println("start");
                    try {
                        iter();
                    } catch (StackOverflowError e) {
                        System.out.println("i: " + i);
                    }
                    System.out.println("asdf");
                }

                private void iter() {
                    System.out.print(".");
                    i++;
                    iter();
                }
            };
        }
        Coroutine.yield();
    }
}
package io.digdag.standards.operator.td;

import com.google.common.base.Optional;
import com.treasuredata.client.TDClientException;
import io.digdag.client.config.Config;
import io.digdag.spi.OperatorContext;
import io.digdag.spi.TaskExecutionException;
import io.digdag.spi.TaskResult;
import io.digdag.standards.operator.DurationInterval;
import io.digdag.standards.operator.state.TaskState;
import io.digdag.standards.operator.td.TDOperator.SystemDefaultConfig;
import io.digdag.util.BaseOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * Base class for digdag operators that submit a Treasure Data job and poll it
 * to completion. Subclasses implement {@link #startJob(TDOperator, String)} to
 * submit the concrete job type; this class handles client construction, retry
 * intervals, resumable polling via persisted task state, and exporting
 * {@code td.last_job_id} / {@code td.last_job} params.
 */
abstract class BaseTdJobOperator
        extends BaseOperator
{
    // task-state key remembering a job that already completed, so a retried
    // task attempt does not submit the job a second time
    private static final String DONE_JOB_ID = "doneJobId";

    protected final TaskState state;
    // task config with the "td" nested config merged in as defaults
    protected final Config params;

    private final Map<String, String> env;

    protected final DurationInterval pollInterval;
    protected final DurationInterval retryInterval;
    protected final SystemDefaultConfig systemDefaultConfig;
    protected final BaseTDClientFactory clientFactory;

    private static Logger logger = LoggerFactory.getLogger(BaseTdJobOperator.class);

    BaseTdJobOperator(OperatorContext context, Map<String, String> env, Config systemConfig, BaseTDClientFactory clientFactory)
    {
        super(context);

        this.params = request.getConfig().mergeDefault(
                request.getConfig().getNestedOrGetEmpty("td"));

        this.state = TaskState.of(request);
        this.env = env;

        this.pollInterval = TDOperator.pollInterval(systemConfig);
        this.retryInterval = TDOperator.retryInterval(systemConfig);
        this.systemDefaultConfig = TDOperator.systemDefaultConfig(systemConfig);
        this.clientFactory = clientFactory;
    }

    /**
     * Builds a TDOperator from config and "td" secrets, runs the job, and maps
     * TD client failures to {@link TaskExecutionException}. The operator is
     * closed via try-with-resources even on failure.
     */
    @Override
    public TaskResult runTask()
    {
        try (TDOperator op = TDOperator.fromConfig(clientFactory, systemDefaultConfig, env, params, context.getSecrets().getSecrets("td"))) {
            return runTask(op);
        }
        catch (TDClientException ex) {
            throw propagateTDClientException(ex);
        }
    }

    /**
     * Runs (or resumes) the job. On first execution the job is submitted and
     * its id is persisted under DONE_JOB_ID before results are processed, so a
     * later retry attaches to the existing job instead of resubmitting.
     */
    public TaskResult runTask(TDOperator op)
    {
        Optional<String> doneJobId = state.params().getOptional(DONE_JOB_ID, String.class);
        TDJobOperator job;
        if (!doneJobId.isPresent()) {
            job = op.runJob(state, "job", pollInterval, retryInterval, (jobOperator, domainKey) -> startJob(jobOperator, domainKey));
            state.params().set(DONE_JOB_ID, job.getJobId());
        }
        else {
            // resumed attempt: re-attach to the already-completed job
            job = op.newJobOperator(doneJobId.get());
        }

        // Get the job results
        TaskResult taskResult = processJobResult(op, job);

        long numRecords = 0L;
        try {
            // job.getJobInfo() may throw error after having retried 3 times
            numRecords = job.getJobInfo().getNumRecords();
        }
        catch (Exception ex) {
            // num_records is informational only; never fail the task over it
            logger.warn("Setting num_records failed. Ignoring this error.", ex);
        }

        // Set last_job_id param
        taskResult.getStoreParams()
                .getNestedOrSetEmpty("td")
                .set("last_job_id", job.getJobId()) // for compatibility with old style
                .getNestedOrSetEmpty("last_job")
                .set("id", job.getJobId())
                .set("num_records", numRecords);

        return taskResult;
    }

    /** Reads the optional "&lt;engine&gt;_pool_name" parameter (e.g. "hive_pool_name"). */
    protected static Optional<String> poolNameOfEngine(Config params, String engine)
    {
        return params.getOptional(engine + "_pool_name", String.class);
    }

    /** Wraps a TD client failure so digdag treats it as a task execution failure. */
    protected static TaskExecutionException propagateTDClientException(TDClientException ex)
    {
        return new TaskExecutionException(ex);
    }

    /**
     * Submits the concrete TD job with the given idempotency domain key and
     * returns the new job id.
     */
    protected abstract String startJob(TDOperator op, String domainKey);

    /** Hook for subclasses to download/transform job results; default is an empty result. */
    protected TaskResult processJobResult(TDOperator op, TDJobOperator job)
    {
        return TaskResult.empty(request);
    }
}
package tools.descartes.dlim.extractor;

import java.util.Arrays;
import java.util.List;

import org.apache.commons.math3.analysis.function.Gaussian;
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI;

import tools.descartes.dlim.Sequence;
import tools.descartes.dlim.extractor.dialogs.BennosOfLaunchExtractionDialog;
import tools.descartes.dlim.generator.ArrivalRateTuple;

/**
 * Extracts a DLIM instance based on the Simple Extraction Model.
 *
 * The seasonal period is estimated via autocorrelation of the arrival-rate
 * trace; if the estimate is not convincing, the trace is smoothed with a
 * Gaussian filter and the estimation is repeated, falling back to a default
 * period of 24 otherwise.
 *
 * @author Joakim von Kistowski
 */
public class BennosOfSimpleProcessExtractor implements IDlimExtractor {

    // EXPECTEDMAXPEAKSPERSEASONAL copied from ModelExtractor
    private static final int EXPECTEDMAXPEAKSPERSEASONAL = 8;

    // number of lags to evaluate; corrSaver[k] holds the Pearson correlation
    // between the trace and the trace lagged by k (refreshed on every call to
    // getPeriodFromAutocorr — NOTE(review): static mutable state, not thread-safe)
    static int numberOfCorrAndLags=200;
    static double[] corrSaver=new double[numberOfCorrAndLags];

    /**
     * Extracts a DLIM instance based on the Simple Extraction Model.
     *
     * @param root the root sequence
     * @param arrList the read arrival rate tuples
     */
    @Override
    public void extractIntoSequence(Sequence root, List<ArrivalRateTuple> arrList) {
        // getPeriodFromAutocorr() derives a candidate seasonal period via autocorrelation
        int lagOfMax=getPeriodFromAutocorr( arrList);
        // open the extraction dialog on the active workbench shell
        Shell shell = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
                .getShell();
        BennosOfLaunchExtractionDialog dialog = new BennosOfLaunchExtractionDialog(shell, root, arrList);
        // if the period (lagOfMax) is good enough, use it directly
        if(periodGood(corrSaver,lagOfMax)){
            dialog.setSeasonalPeriod(lagOfMax);
        }
        // otherwise smooth the trace and retry the estimation
        else{
            System.out.println(" ");
            System.out.println("normales Verfahren mit Autokorrelation war nicht ausreichend.");
            System.out.println("Führe eine Glättung mit Gaußfilter durch");
            System.out.println("und wiederhole das Verfahren");
            System.out.println(" ");
            // NOTE(review): this is an alias, not a copy — the Gaussian smoothing
            // below mutates the caller's arrList in place. Confirm that is intended.
            List<ArrivalRateTuple>arrListGauss= arrList;
            reduceArrivalRateListNoise(arrListGauss, lagOfMax);
            //System.out.println(arrList.toString());
            // corrSaver is refreshed again by this recomputation
            lagOfMax=getPeriodFromAutocorr( arrListGauss);
            // if the recomputed period is good, use it; otherwise fall back to
            // the default period of 24
            if(periodGood(corrSaver,lagOfMax)){
                dialog.setSeasonalPeriod(lagOfMax);
            }
            else{
                dialog.setSeasonalPeriod(24);
            }
        }
        dialog.open();
    }

    /*
     * Create a gaussian filter with a given kernel width.
     * The width is forced to an odd value >= 1 and the kernel is normalized to
     * sum to 1.
     */
    private static double[] createGaussianFilter(int width) {
        int filterWidth = width;
        if (filterWidth % 2 == 0) {
            filterWidth++;
        }
        filterWidth = Math.max(1, filterWidth);
        double[] filter = new double[filterWidth];
        double sigma = Math.sqrt((filterWidth * filterWidth - 1.0) / 12.0);
        int mean = filterWidth / 2;
        double filterSum = 0.0;
        Gaussian gaussian = new Gaussian(mean, sigma);
        for (int i = 0; i < filterWidth; i++) {
            filter[i] = gaussian.value(i);
            filterSum += filter[i];
        }
        // normalize to 1
        for (int i = 0; i < filterWidth; i++) {
            filter[i] = filter[i] / filterSum;
        }
        return filter;
    }

    /*
     * Reduce noise within the read arrival rate list by applying a gaussian
     * filter. Mutates the tuples in arrList in place.
     * NOTE(review): the lag-dependent filter width is commented out; a fixed
     * width of 9 is used instead — confirm which is intended.
     */
    public static void reduceArrivalRateListNoise(List<ArrivalRateTuple> arrList, int lagOfMax) {
        //double[] filter = createGaussianFilter((int) (lagOfMax / EXPECTEDMAXPEAKSPERSEASONAL));
        double[] filter = createGaussianFilter(9);
        double[] arrivalRates = new double[arrList.size()];
        int index = 0;
        for (ArrivalRateTuple t : arrList) {
            arrivalRates[index] = t.getArrivalRate();
            index++;
        }
        index = 0;
        for (ArrivalRateTuple t : arrList) {
            t.setArrivalRate(getFilteredValueAtIndex(arrivalRates, index, filter));
            index++;
        }
    }

    /*
     * Apply gaussian filter to arrival rate at index index.
     */
    private static double getFilteredValueAtIndex(double[] arrivalRateArray, int index, double[] filter) {
        int filterCenter = filter.length / 2;
        double filteredValue = 0.0;
        for (int i = 0; i < filter.length; i++) {
            filteredValue += filter[i] * getArrivalRateFromArray(arrivalRateArray, index + (i - filterCenter));
        }
        return filteredValue;
    }

    /*
     * Comfort function. Returns 0 for out of bound array indices.
     */
    private static double getArrivalRateFromArray(double[] array, int index) {
        if (index < 0 || index >= array.length) {
            return 0.0;
        }
        return array[index];
    }

    /*
     * Estimates the dominant period of the trace via autocorrelation:
     * computes the Pearson correlation between the trace and cyclically lagged
     * copies of itself for lags 0..numberOfCorrAndLags-1 (stored in corrSaver),
     * then returns the lag (>= 75) with the highest correlation.
     */
    private static int getPeriodFromAutocorr(List<ArrivalRateTuple> arrList){
        // Section: autocorrelation to determine dominant periods
        double [] arrRateArray=new double[arrList.size()];
        int j=0;
        for(ArrivalRateTuple art: arrList){
            arrRateArray[j]=art.getArrivalRate();
            j++;
        }
        // cyclically lagged copy of arrList, rebuilt for each lag k
        double [] arrRateArrayLag=new double[arrList.size()];
        // k is the lag variable: try several lags and look for a correlation
        // between the original trace and the lagged trace close to 1
        for(int k=0;k<numberOfCorrAndLags;k++){
            int l=0;
            for(ArrivalRateTuple art: arrList){
                arrRateArrayLag[(l+k)%(arrList.size())]=art.getArrivalRate();
                l++;
            }
            // compute Pearson product-moment correlation coefficient
            // (a number in the interval [-1,1])
            PearsonsCorrelation corr=new PearsonsCorrelation();
            double correlationTraceLagTrace=corr.correlation(arrRateArray, arrRateArrayLag);
            // store the correlation value in the array
            corrSaver[k]=correlationTraceLagTrace;
        }
        // track the maximum correlation and its lag
        double maxCorr=0;
        int lagOfMax=0;
        // 2016-05-29: start scanning at lag 75 (original comment truncated;
        // presumably to skip the trivially high correlations at small lags)
        for(int k=75;k<corrSaver.length;k++){
            if(corrSaver[k]>maxCorr){
                maxCorr=corrSaver[k];
                lagOfMax=k;
            }
        }
        System.out.println("maximale Korrelation bei Lag "+lagOfMax+" entspricht "+maxCorr);
        // do multiples of the best lag also yield high correlation values?
        for(int i=1;i<10;i++){
            System.out.println("Korrelation bei "+i+"-fachen Lag");
            System.out.println("corrSaver[lagOfMax*"+ i+"] =" + " "+corrSaver[(lagOfMax*i)%corrSaver.length]);
        }
        return lagOfMax;
    }

    // corrSaver[k] holds the correlation between the trace and the trace lagged by k
    /*
     * Accepts the candidate period only if the first five multiples of lagOfMax
     * all show correlation above 0.33. Under this criterion e.g. the
     * wikipedia_trace, the ru.wikipedia.org_trace and the WorldCup98_trace
     * accept the autocorrelation-derived seasonalPeriod.
     */
    private static boolean periodGood(double[] corrSaver, int lagOfMax ){
        for(int k=1;k<6;k++){
            if(corrSaver[(lagOfMax*k)%corrSaver.length]<=0.33){ // threshold 33% instead of 50%
                return false;
            }
        }
        return true;
    }
}
package com.silverpeas.interestCenter.ejb; import java.sql.Connection; import java.sql.SQLException; import java.util.List; import javax.ejb.Stateless; import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; import com.silverpeas.accesscontrol.ForbiddenRuntimeException; import com.silverpeas.interestCenter.InterestCenterRuntimeException; import com.silverpeas.interestCenter.model.InterestCenter; import com.stratelia.webactiv.util.DBUtil; import com.stratelia.webactiv.util.JNDINames; import com.stratelia.webactiv.util.exception.SilverpeasRuntimeException; /** * InterestCenterBm EJB implementation for detailed comments for each method see remote interface * class * * @see InterestCenterBm */ @Stateless(name = "InterestCenter", description = "Stateless session bean to manage interest centers.") @TransactionAttribute(TransactionAttributeType.SUPPORTS) public class InterestCenterBmEJB implements InterestCenterBm { private static final long serialVersionUID = -5867239072798551540L; @Override public List<InterestCenter> getICByUserID(int userID) { Connection con = DBUtil.makeConnection(JNDINames.INTEREST_CENTER_DATASOURCE); try { return InterestCenterDAO.getICByUserID(con, userID); } catch (SQLException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.getICByUserID()", "Pdc.CANNOT_GET_INTEREST_CENTERS", String.valueOf(userID), e); } catch (DAOException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.getICByUserID()", "Pdc.CANNOT_GET_INTEREST_CENTERS", String.valueOf(userID), e); } finally { DBUtil.close(con); } } @Override public InterestCenter getICByID(int icPK) { Connection con = DBUtil.makeConnection(JNDINames.INTEREST_CENTER_DATASOURCE); try { return InterestCenterDAO.getICByPK(con, icPK); } catch (SQLException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.getICByID()", "InterestCenter.CANNOT_LOAD_LIST_OF_IC", String.valueOf(icPK), e); } catch (DAOException e) { throw new 
InterestCenterRuntimeException("InterestCenterBmEJB.getICByID()", "InterestCenter.CANNOT_LOAD_LIST_OF_IC", String.valueOf(icPK), e); } finally { DBUtil.close(con); } } @Override @TransactionAttribute(TransactionAttributeType.SUPPORTS) public int createIC(InterestCenter ic) { Connection con = DBUtil.makeConnection(JNDINames.INTEREST_CENTER_DATASOURCE); try { return InterestCenterDAO.createIC(con, ic); } catch (SQLException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.createIC()", "Pdc.CANNOT_CREATE_INTEREST_CENTER", ic.toString(), e); } catch (DAOException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.createIC()", "Pdc.CANNOT_CREATE_INTEREST_CENTER", ic.toString(), e); } finally { DBUtil.close(con); } } @Override @TransactionAttribute(TransactionAttributeType.SUPPORTS) public void updateIC(InterestCenter ic) { Connection con = DBUtil.makeConnection(JNDINames.INTEREST_CENTER_DATASOURCE); try { InterestCenterDAO.updateIC(con, ic); } catch (SQLException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.updateIC()", "Pdc.CANNOT_UPDATE_INTEREST_CENTER", ic.toString(), e); } catch (DAOException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.updateIC()", "Pdc.CANNOT_UPDATE_INTEREST_CENTER", ic.toString(), e); } finally { DBUtil.close(con); } } @Override @TransactionAttribute(TransactionAttributeType.SUPPORTS) public void removeICByPK(List<Integer> pks, String userId) { Connection con = DBUtil.makeConnection(JNDINames.INTEREST_CENTER_DATASOURCE); try { //check rights : check that the current user has the rights to delete the interest center int userIdInt = Integer.parseInt(userId); for (Integer icPk : pks) { InterestCenter interestCenter = getICByID(icPk); if(userIdInt != interestCenter.getOwnerID()) { throw new ForbiddenRuntimeException("InterestCenterBmEJB.removeICByPK(ArrayList pks)", SilverpeasRuntimeException.ERROR, "peasCore.RESOURCE_ACCESS_UNAUTHORIZED", "interest center id="+icPk+", 
userId="+userId); } } //remove InterestCenterDAO.removeICByPK(con, pks); } catch (SQLException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.removeICByPK(ArrayList pks)", "Pdc.CANNOT_DELETE_INTEREST_CENTERS", pks.toString(), e); } catch (DAOException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.removeICByPK(ArrayList pks)", "Pdc.CANNOT_DELETE_INTEREST_CENTERS", pks.toString(), e); } finally { DBUtil.close(con); } } @Override @TransactionAttribute(TransactionAttributeType.SUPPORTS) public void removeICByPK(int pk) { Connection con = DBUtil.makeConnection(JNDINames.INTEREST_CENTER_DATASOURCE); try { InterestCenterDAO.removeICByPK(con, pk); } catch (SQLException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.removeICByPK(int pk)", "Pdc.CANNOT_DELETE_INTEREST_CENTER", String.valueOf(pk), e); } catch (DAOException e) { throw new InterestCenterRuntimeException("InterestCenterBmEJB.removeICByPK(int pk)", "Pdc.CANNOT_DELETE_INTEREST_CENTER", String.valueOf(pk), e); } finally { DBUtil.close(con); } } }
package cz.seznam.euphoria.core.executor.inmem;

import cz.seznam.euphoria.core.client.dataset.BatchWindowing;
import cz.seznam.euphoria.core.client.dataset.Partitioning;
import cz.seznam.euphoria.core.client.dataset.Windowing;
import cz.seznam.euphoria.core.client.flow.Flow;
import cz.seznam.euphoria.core.client.functional.CombinableReduceFunction;
import cz.seznam.euphoria.core.client.functional.UnaryFunction;
import cz.seznam.euphoria.core.client.functional.UnaryFunctor;
import cz.seznam.euphoria.core.client.graph.DAG;
import cz.seznam.euphoria.core.client.graph.Node;
import cz.seznam.euphoria.core.client.io.Collector;
import cz.seznam.euphoria.core.client.io.DataSink;
import cz.seznam.euphoria.core.client.io.DataSource;
import cz.seznam.euphoria.core.client.io.Partition;
import cz.seznam.euphoria.core.client.io.Reader;
import cz.seznam.euphoria.core.client.io.Writer;
import cz.seznam.euphoria.core.client.operator.FlatMap;
import cz.seznam.euphoria.core.client.operator.Operator;
import cz.seznam.euphoria.core.client.operator.ReduceStateByKey;
import cz.seznam.euphoria.core.client.operator.Repartition;
import cz.seznam.euphoria.core.client.operator.Union;
import cz.seznam.euphoria.core.client.util.Pair;
import cz.seznam.euphoria.core.executor.ExecUnit;
import cz.seznam.euphoria.core.executor.Executor;
import cz.seznam.euphoria.core.executor.FlowUnfolder;
import cz.seznam.euphoria.core.executor.FlowUnfolder.InputOperator;
import cz.seznam.euphoria.core.executor.SerializableUtils;
import cz.seznam.euphoria.core.executor.TriggerScheduler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Inmem executor for testing.
 *
 * Each operator partition runs on its own thread; partitions are connected by
 * bounded blocking queues. End-of-stream is signalled in-band by an
 * {@link EndOfStream} marker element.
 */
public class InMemExecutor implements Executor {

  private static final Logger LOG = LoggerFactory.getLogger(InMemExecutor.class);

  /**
   * Blocking source of elements for a single partition; signals exhaustion by
   * throwing {@link EndOfStreamException} rather than returning a sentinel.
   */
  @FunctionalInterface
  private interface Supplier<T> {
    T get() throws EndOfStreamException;
  }

  // end of stream signal
  static class EndOfStream {
    static EndOfStream get() {
      return new EndOfStream();
    }
  }

  // checked exception used to terminate a consumer loop when a supplier runs dry
  private static class EndOfStreamException extends Exception {}

  /**
   * Supplier that reads a single {@link Partition} through its {@link Reader}
   * and wraps each element into a {@code Datum} tagged with the batch window.
   */
  static final class PartitionSupplierStream<T> implements Supplier {
    final Reader<T> reader;
    final Partition partition;
    PartitionSupplierStream(Partition<T> partition) {
      this.partition = partition;
      try {
        this.reader = partition.openReader();
      } catch (IOException e) {
        throw new RuntimeException(
            "Failed to open reader for partition: " + partition, e);
      }
    }
    @Override
    public Object get() throws EndOfStreamException {
      if (!this.reader.hasNext()) {
        // exhausted: close the reader and signal end-of-stream to the caller
        try {
          this.reader.close();
        } catch (IOException e) {
          throw new RuntimeException(
              "Failed to close reader for partition: " + this.partition);
        }
        throw new EndOfStreamException();
      }
      T next = this.reader.next();
      BatchWindowing.BatchWindow w =
          BatchWindowing.get().assignWindows(next).iterator().next();
      return new Datum<>(w.getGroup(), w.getLabel(), next);
    }
  }

  /** Partitioned provider of input data for single operator. */
  private static final class InputProvider<T> implements Iterable<Supplier<T>> {
    final List<Supplier<T>> suppliers;
    InputProvider() {
      this.suppliers = new ArrayList<>();
    }
    public int size() {
      return suppliers.size();
    }
    public void add(Supplier<T> s) {
      suppliers.add(s);
    }
    public Supplier<T> get(int i) {
      return suppliers.get(i);
    }
    @Override
    public Iterator<Supplier<T>> iterator() {
      return suppliers.iterator();
    }
    Stream<Supplier<T>> stream() {
      return suppliers.stream();
    }
  }

  /** Collector that blocks on a bounded queue (provides backpressure). */
  static class QueueCollector<T> implements Collector<T> {
    static <T> QueueCollector<T> wrap(BlockingQueue<T> queue) {
      return new QueueCollector<>(queue);
    }
    private final BlockingQueue<T> queue;
    QueueCollector(BlockingQueue<T> queue) {
      this.queue = queue;
    }
    @Override
    public void collect(T elem) {
      try {
        queue.put(elem);
      } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
      }
    }
  }

  /**
   * Supplier backed by a blocking queue; translates the in-band
   * {@link EndOfStream} marker back into {@link EndOfStreamException}.
   */
  private static final class QueueSupplier<T> implements Supplier<T> {
    static <T> QueueSupplier<T> wrap(BlockingQueue<T> queue) {
      return new QueueSupplier<>(queue);
    }
    private final BlockingQueue<T> queue;
    QueueSupplier(BlockingQueue<T> queue) {
      this.queue = queue;
    }
    @Override
    public T get() throws EndOfStreamException {
      try {
        T take = queue.take();
        if (take instanceof EndOfStream) {
          throw new EndOfStreamException();
        }
        return take;
      } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
      }
    }
  }

  /**
   * Per-unit bookkeeping: maps each (producer, consumer) edge of the DAG to
   * the materialized partition suppliers, and tracks operators already started.
   */
  private static final class ExecutionContext {
    // map of operator inputs to suppliers
    Map<Pair<Operator<?, ?>, Operator<?, ?>>, InputProvider<?>> materializedOutputs
        = Collections.synchronizedMap(new HashMap<>());
    // already running operators
    Set<Operator<?, ?>> runningOperators = Collections.synchronizedSet(
        new HashSet<>());
    private boolean containsKey(Pair<Operator<?, ?>, Operator<?, ?>> d) {
      return materializedOutputs.containsKey(d);
    }
    void add(Operator<?, ?> source, Operator<?, ?> target,
        InputProvider<?> partitions) {
      Pair<Operator<?, ?>, Operator<?, ?>> edge = Pair.of(source, target);
      if (containsKey(edge)) {
        throw new IllegalArgumentException("Dataset for edge " + edge
            + " is already materialized!");
      }
      materializedOutputs.put(edge, partitions);
    }
    InputProvider<?> get(Operator<?, ?> source, Operator<?, ?> target) {
      Pair<Operator<?, ?>, Operator<?, ?>> edge = Pair.of(source, target);
      InputProvider<?> sup = materializedOutputs.get(edge);
      if (sup == null) {
        throw new IllegalArgumentException(String.format(
            "Do not have suppliers for edge %s -> %s (original producer %s )",
            source, target, source.output().getProducer()));
      }
      return sup;
    }
    void markRunning(Operator<?, ?> operator) {
      if (!this.runningOperators.add(operator)) {
        throw new IllegalStateException("Twice running the same operator?");
      }
    }
    boolean isRunning(Operator<?, ?> operator) {
      return runningOperators.contains(operator);
    }
  }

  // hand-off queue: tasks are passed directly to a (possibly new) worker thread
  private final BlockingQueue<Runnable> queue = new SynchronousQueue<>(false);
  // unbounded cached thread pool; uncaught exceptions are dumped to stderr
  private final ThreadPoolExecutor executor = new ThreadPoolExecutor(
      0, Integer.MAX_VALUE,
      60, TimeUnit.SECONDS,
      queue,
      new ThreadFactory() {
        ThreadFactory factory = Executors.defaultThreadFactory();
        @Override
        public Thread newThread(Runnable r) {
          Thread thread = factory.newThread(r);
          thread.setUncaughtExceptionHandler((Thread t, Throwable e) -> {
            e.printStackTrace(System.err);
          });
          return thread;
        }
      });

  private TriggerScheduler triggering = new ProcessingTimeTriggerScheduler();
  // cap on distinct keys per window in reduceStateByKey; -1 means unlimited
  private volatile int reduceStateByKeyMaxKeysPerWindow = -1;

  public void setReduceStateByKeyMaxKeysPerWindow(int maxKeyPerWindow) {
    this.reduceStateByKeyMaxKeysPerWindow = maxKeyPerWindow;
  }

  /** Asynchronous submission is not supported; use {@link #waitForCompletion(Flow)}. */
  @Override
  public Future<Integer> submit(Flow flow) {
    throw new UnsupportedOperationException("unsupported");
  }

  /**
   * Synchronously executes the flow: unfolds it to basic operators, runs each
   * {@link ExecUnit}, waits for all output-writer tasks, then commits (or on
   * failure rolls back) all sinks. Returns 0 on success.
   */
  @Override
  @SuppressWarnings("unchecked")
  public int waitForCompletion(Flow flow) {
    // transform the given flow to DAG of basic dag
    DAG<Operator<?, ?>> dag = FlowUnfolder.unfold(flow, Executor.getBasicOps());
    final List<Future> runningTasks = new ArrayList<>();
    Collection<Node<Operator<?, ?>>> leafs = dag.getLeafs();
    List<ExecUnit> units = ExecUnit.split(dag);
    if (units.isEmpty()) {
      throw new IllegalArgumentException("Cannot execute empty flow");
    }
    for (ExecUnit unit : units) {
      ExecutionContext context = new ExecutionContext();
      execUnit(unit, context);
      runningTasks.addAll(consumeOutputs(unit.getLeafs(), context));
    }
    // extract all processed sinks
    List<DataSink<?>> sinks = leafs.stream()
        .map(n -> n.get().output().getOutputSink())
        .filter(s -> s != null)
        .collect(Collectors.toList());
    // wait for all threads to finish
    for (Future f : runningTasks) {
      try {
        f.get();
      } catch (InterruptedException e) {
        break;
      } catch (ExecutionException e) {
        // when any one of the tasks fails rollback all sinks and fail
        sinks.forEach(DataSink::rollback);
        throw new RuntimeException(e);
      }
    }
    // commit all sinks
    try {
      for (DataSink<?> s : sinks) {
        s.commit();
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return 0;
  }

  /** Read all outputs of given nodes and store them using their sinks. */
  @SuppressWarnings("unchecked")
  private List<Future> consumeOutputs(
      Collection<Node<Operator<?, ?>>> leafs,
      ExecutionContext context) {
    List<Future> tasks = new ArrayList<>();
    // consume outputs
    for (Node<Operator<?, ?>> output : leafs) {
      DataSink<?> sink = output.get().output().getOutputSink();
      final InputProvider<?> provider = context.get(output.get(), null);
      int part = 0;
      for (Supplier<?> s : provider) {
        final Writer writer = sink.openWriter(part++);
        tasks.add(executor.submit(() -> {
          try {
            try {
              for (;;) {
                Object elem = s.get();
                // ~ swallow EndOfWindow events from leaving
                // the inmem executor
                if (elem instanceof EndOfWindow) {
                  continue;
                }
                // ~ unwrap the bare bone element from the inmem
                // specific "Datum" cargo object
                Datum datum = (Datum) elem;
                writer.write(datum.element);
              }
            } catch (EndOfStreamException ex) {
              // end of the stream
              writer.commit();
              writer.close();
              // and terminate the thread
            }
          } catch (IOException ex) {
            try {
              writer.rollback();
              // propagate exception
              throw new RuntimeException(ex);
            } catch (IOException ioex) {
              LOG.warn("Something went wrong", ioex);
              // swallow exception
            }
            throw new RuntimeException(ex);
          } finally {
            try {
              writer.close();
            } catch (IOException ioex) {
              LOG.warn("Something went wrong", ioex);
              // swallow exception
            }
          }
        }));
      }
    }
    return tasks;
  }

  // ~ unchecked: all is fine, except javac (1.8.0_91) has some issue
  @SuppressWarnings("unchecked")
  private InputProvider<?> createStream(DataSource<?> source) {
    // one supplier per source partition
    InputProvider<?> ret = new InputProvider<>();
    source.getPartitions().stream()
        .map(PartitionSupplierStream::new)
        .forEach(ret::add);
    return ret;
  }

  private void execUnit(ExecUnit unit, ExecutionContext context) {
    unit.getDAG().traverse().forEach(n -> execNode(n, context));
  }

  /**
   * Execute single operator and return the suppliers for partitions
   * of output.
   */
  @SuppressWarnings("unchecked")
  private void execNode(
      Node<Operator<?, ?>> node, ExecutionContext context) {
    Operator<?, ?> op = node.get();
    final InputProvider<?> output;
    if (context.isRunning(op)) {
      return;
    }
    if (op instanceof InputOperator) {
      output = createStream(op.output().getSource());
    } else if (op instanceof FlatMap) {
      output = execMap((Node) node, context);
    } else if (op instanceof Repartition) {
      output = execRepartition((Node) node, context);
    } else if (op instanceof ReduceStateByKey) {
      output = execReduceStateByKey((Node) node, context);
    } else if (op instanceof Union) {
      output = execUnion((Node) node, context);
    } else {
      throw new IllegalStateException("Invalid operator: " + op);
    }
    context.markRunning(op);
    // store output for each child
    if (node.getChildren().size() > 1) {
      // multiple consumers: fan the output out by copying every element into
      // one queue per (child, partition)
      List<List<BlockingQueue<?>>> forkedProviders = new ArrayList<>();
      for (Node<Operator<?, ?>> ch : node.getChildren()) {
        List<BlockingQueue<?>> forkedProviderQueue = new ArrayList<>();
        InputProvider<?> forkedProvider = new InputProvider<>();
        forkedProviders.add(forkedProviderQueue);
        for (int p = 0; p < output.size(); p++) {
          BlockingQueue<?> queue = new ArrayBlockingQueue<>(5000);
          forkedProviderQueue.add(queue);
          forkedProvider.add((Supplier) QueueSupplier.wrap(queue));
        }
        context.add(node.get(), ch.get(), forkedProvider);
      }
      for (int p = 0; p < output.size(); p++) {
        int partId = p;
        Supplier<?> partSup = output.get(p);
        List<BlockingQueue<?>> outputs = forkedProviders.stream()
            .map(l -> l.get(partId)).collect(Collectors.toList());
        executor.execute(() -> {
          // copy the original data to all queues
          for (;;) {
            try {
              Object item = partSup.get();
              for (BlockingQueue ch : outputs) {
                try {
                  ch.put(item);
                } catch (InterruptedException ex) {
                  return;
                }
              }
            } catch (EndOfStreamException ex) {
              for (BlockingQueue ch : outputs) {
                try {
                  ch.put(EndOfStream.get());
                } catch (InterruptedException e) {
                  // ignore
                }
              }
              return;
            }
          }
        });
      }
    } else if (node.getChildren().size() == 1) {
      context.add(node.get(), node.getChildren().iterator().next().get(), output);
    } else {
      // leaf operator: register its output under a null consumer so
      // consumeOutputs() can pick it up
      context.add(node.get(), null, output);
    }
  }

  /** Runs a FlatMap: one worker thread per input partition applying the functor. */
  @SuppressWarnings("unchecked")
  private InputProvider<?> execMap(Node<FlatMap> flatMap,
      ExecutionContext context) {
    InputProvider<?> suppliers = context.get(
        flatMap.getSingleParentOrNull().get(), flatMap.get());
    InputProvider<?> ret = new InputProvider<>();
    final UnaryFunctor mapper = flatMap.get().getFunctor();
    for (Supplier<?> s : suppliers) {
      final BlockingQueue<?> out = new ArrayBlockingQueue(5000);
      ret.add((Supplier) QueueSupplier.wrap(out));
      executor.execute(() -> {
        QueueCollector outQ = QueueCollector.wrap(out);
        DatumCollector outC = new DatumCollector(outQ);
        try {
          for (;;) {
            // read input
            Object o = s.get();
            if (o instanceof EndOfWindow) {
              // pass window markers straight through
              outQ.collect(o);
            } else {
              Datum d = (Datum) o;
              // transform
              outC.assignWindowing(d.group, d.label);
              mapper.apply(d.element, outC);
            }
          }
        } catch (EndOfStreamException ex) {
          outQ.collect(EndOfStream.get());
        }
      });
    }
    return ret;
  }

  /** Runs a Repartition by shuffling elements with an identity key extractor. */
  @SuppressWarnings("unchecked")
  private InputProvider<?> execRepartition(
      Node<Repartition> repartition,
      ExecutionContext context) {
    Partitioning partitioning = repartition.get().getPartitioning();
    int numPartitions = partitioning.getNumPartitions();
    InputProvider<?> input = context.get(
        repartition.getSingleParentOrNull().get(), repartition.get());
    if (numPartitions <= 0) {
      throw new IllegalArgumentException("Cannot repartition input to "
          + numPartitions + " partitions");
    }
    List<BlockingQueue> outputQueues = repartitionSuppliers(
        input, e -> e, partitioning);
    InputProvider<?> ret = new InputProvider<>();
    outputQueues.stream()
        .map(QueueSupplier::new)
        .forEach(s -> ret.add((Supplier) s));
    return ret;
  }

  /** Two-component key used for grouped reduceStateByKey. */
  private static final class CompositeKey<A, B> {
    final A first;
    final B second;
    CompositeKey(A first, B second) {
      this.first = first;
      this.second = second;
    }
    @Override
    public String toString() {
      return "CompositeKey(" + first + "," + second + ")";
    }
    @Override
    public boolean equals(Object obj) {
      if (obj instanceof CompositeKey) {
        CompositeKey other = (CompositeKey) obj;
        return first.equals(other.first) && second.equals(other.second);
      }
      return false;
    }
    @Override
    public int hashCode() {
      // mix the two component hashes; falls back to whichever part is non-null
      if (first != null && second != null) {
        int h = first.hashCode();
        int shift = Integer.SIZE >> 1;
        return ((h >> shift) | (h << shift)) ^ second.hashCode();
      }
      if (first != null) {
        return first.hashCode();
      }
      if (second != null) {
        return second.hashCode();
      }
      return 0;
    }
  }

  /**
   * Runs a ReduceStateByKey: shuffles the input by key and starts one
   * {@code ReduceStateByKeyReducer} per output partition.
   */
  @SuppressWarnings("unchecked")
  private InputProvider<?> execReduceStateByKey(
      Node<ReduceStateByKey> reduceStateByKeyNode, ExecutionContext context) {
    final UnaryFunction keyExtractor;
    final ReduceStateByKey reduceStateByKey = reduceStateByKeyNode.get();
    if (reduceStateByKey.isGrouped()) {
      // grouped input: combine the group (pair's first) with the user key
      UnaryFunction reduceKeyExtractor = reduceStateByKey.getKeyExtractor();
      keyExtractor = (UnaryFunction<Pair, CompositeKey>) (Pair p) -> {
        return new CompositeKey(p.getFirst(), reduceKeyExtractor.apply(p));
      };
    } else {
      keyExtractor = reduceStateByKey.getKeyExtractor();
    }
    InputProvider<?> suppliers = context.get(
        reduceStateByKeyNode.getSingleParentOrNull().get(),
        reduceStateByKeyNode.get());
    final UnaryFunction valueExtractor = reduceStateByKey.getValueExtractor();
    final UnaryFunction stateFactory = reduceStateByKey.getStateFactory();
    final Partitioning partitioning = reduceStateByKey.getPartitioning();
    final Windowing windowing = reduceStateByKey.getWindowing();
    final CombinableReduceFunction stateCombiner = reduceStateByKey.getStateCombiner();
    List<BlockingQueue> repartitioned =
        repartitionSuppliers(suppliers, keyExtractor, partitioning);
    EndOfWindowBroadcast eowBroadcast =
        // ~ no need for broadcasts upon "batched and attached windowing"
        windowing == null || windowing == BatchWindowing.get()
            ? new EndOfWindowBroadcast.NoopInstance()
            : new EndOfWindowBroadcast.NotifyingInstance();
    InputProvider<?> outputSuppliers = new InputProvider<>();
    // consume repartitioned suppliers
    int i = 0;
    for (BlockingQueue q : repartitioned) {
      final BlockingQueue output = new ArrayBlockingQueue(5000);
      outputSuppliers.add(QueueSupplier.wrap(output));
      executor.execute(new ReduceStateByKeyReducer(
          Integer.toHexString(System.identityHashCode(reduceStateByKey))
              + "@" + reduceStateByKey.getName() + ":" + i + "/" + repartitioned.size(),
          q, output, windowing,
          keyExtractor, valueExtractor, stateFactory, stateCombiner,
          SerializableUtils.cloneSerializable(triggering),
          reduceStateByKey.input().isBounded(),
          reduceStateByKeyMaxKeysPerWindow,
          eowBroadcast));
      i++;
    }
    return outputSuppliers;
  }

  /**
   * Hash-shuffles elements from the given suppliers into output queues by the
   * extracted key; EndOfWindow markers are forwarded to all outputs only once
   * every input partition has produced them.
   */
  @SuppressWarnings("unchecked")
  private List<BlockingQueue> repartitionSuppliers(
      InputProvider<?> suppliers,
      final UnaryFunction keyExtractor,
      final Partitioning partitioning) {
    int numInputPartitions = suppliers.size();
    final int outputPartitions = partitioning.getNumPartitions() > 0
        ? partitioning.getNumPartitions() : numInputPartitions;
    final List<BlockingQueue> ret = new ArrayList(outputPartitions);
    for (int i = 0; i < outputPartitions; i++) {
      ret.add(new ArrayBlockingQueue(5000));
    }
    // count running partition readers
    CountDownLatch workers = new CountDownLatch(numInputPartitions);
    // track end-of-window occurrences
    EndOfWindowCountDown eowCounter = new EndOfWindowCountDown();
    for (Supplier s : suppliers) {
      executor.execute(() -> {
        try {
          try {
            for (;;) {
              // read input
              Object o = s.get();
              if (o instanceof EndOfWindow) {
                if (eowCounter.countDown((EndOfWindow) o, numInputPartitions)) {
                  for (BlockingQueue r : ret) {
                    r.put(o);
                  }
                }
              } else {
                Datum d = (Datum) o;
                // determine partition
                Object key = keyExtractor.apply(d.element);
                // mask the sign bit so the modulo is never negative
                int partition =
                    (partitioning.getPartitioner().getPartition(key) & Integer.MAX_VALUE)
                        % outputPartitions;
                // write to the right partition
                ret.get(partition).put(d);
              }
            }
          } catch (EndOfStreamException ex) {
            // ~ no-op
          }
        } catch (InterruptedException ex) {
          throw new RuntimeException(ex);
        } finally {
          workers.countDown();
        }
      });
    }
    waitForStreamEnds(workers, ret);
    return ret;
  }

  // wait until runningTasks is not zero and then send EOF to all output queues
  @SuppressWarnings("unchecked")
  private void waitForStreamEnds(
      CountDownLatch fire, List<BlockingQueue> outputQueues) {
    // start a new task that will wait for all read partitions to end
    executor.execute(() -> {
      try {
        fire.await();
      } catch (InterruptedException ex) {
        LOG.warn("waiting-for-stream-ends interrupted");
      }
      // try sending eof to all outputs
      for (BlockingQueue queue : outputQueues) {
        try {
          queue.put(EndOfStream.get());
        } catch (InterruptedException ex) {
          // nop
        }
      }
    });
  }

  /** Runs a Union by simply concatenating the parents' partition suppliers. */
  @SuppressWarnings("unchecked")
  private InputProvider<?> execUnion(
      Node<Union> union, ExecutionContext context) {
    InputProvider<?> ret = new InputProvider<>();
    union.getParents().stream()
        .flatMap(p -> context.get(p.get(), union.get()).stream())
        .forEach(s -> ret.add((Supplier) s));
    return ret;
  }

  /**
   * Abort execution of all tasks.
   */
  public void abort() {
    executor.shutdownNow();
  }

  /** Replaces the trigger scheduler used by subsequent reduceStateByKey runs. */
  public InMemExecutor setTriggering(TriggerScheduler triggering) {
    this.triggering = triggering;
    return this;
  }
}
package org.fedorahosted.flies.core.rest; import java.util.Set; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import net.openl10n.api.ContentType; import net.openl10n.api.rest.document.Document; import net.openl10n.api.rest.project.Project; import org.apache.commons.lang.StringUtils; import org.fedorahosted.flies.core.model.ProjectIteration; import org.jboss.seam.Component; import org.jboss.seam.annotations.Name; import com.google.common.collect.ImmutableSet; @Name("projectIterationResource") public class ProjectIterationResourceImpl implements ProjectIterationResource{ private ProjectIteration projectIteration; public void setProjectIteration(ProjectIteration projectIteration) { this.projectIteration = projectIteration; } @Override public Project get(String ext) { Project p = new Project("id", "name", "summary"); Set<String> extensions = ImmutableSet.of( StringUtils.split(ext, ',') ); if(extensions != null && extensions.contains("docs")){ p.getDocuments().add( new Document("/path/to/doc.txt", ContentType.TextPlain ) ); } return p; } @Override public DocumentResource getDocument(String documentId) { DocumentResourceImpl docRes = (DocumentResourceImpl) Component.getInstance(DocumentResourceImpl.class, true); //docRes.setProject(); return DocumentResourceImpl.getProxyWrapper(docRes); } // hack to allow sub-resource in resteasy public static ProjectIterationResource getProxyWrapper(final ProjectIterationResource instance){ return new ProjectIterationResource(){ @Override public Project get(String extensions) { return instance.get(extensions); } @Override public DocumentResource getDocument(String documentId) { return instance.getDocument(documentId); } }; } }
package org.curriki.gwt.client.widgets.currikiitem.display;

import java.util.ArrayList;
import java.util.List;

import com.xpn.xwiki.gwt.api.client.Document;
import com.xpn.xwiki.gwt.api.client.Attachment;
import com.xpn.xwiki.gwt.api.client.XObject;
import com.google.gwt.user.client.ui.*;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.Event;
import org.curriki.gwt.client.widgets.currikiitem.CurrikiItem;
import org.curriki.gwt.client.widgets.upload.UploadWidget;
import org.curriki.gwt.client.Constants;
import org.curriki.gwt.client.CurrikiService;
import org.curriki.gwt.client.CurrikiAsyncCallback;
import org.curriki.gwt.client.Main;
import org.curriki.gwt.client.utils.Translator;
import org.curriki.gwt.client.utils.URLUtils;
import org.curriki.gwt.client.utils.Loading;

/**
 * Base display widget for a Curriki item (link or attachment asset).
 * Switches between a read-only view and an edit mode; edit mode acquires a
 * document lock on the server before showing the edit controls.
 *
 * NOTE(review): fields like panel, status, doc, item and helpers such as
 * reloadDocument() are inherited from AbstractItemDisplay (not visible here).
 */
public abstract class ItemDisplay extends AbstractItemDisplay {
    // upload form shown while editing an attachment asset
    UploadWidget upload = null;
    // text box shown while editing an external link
    TextBox textbox = null;
    Label link = null;
    Image icon = null;
    // text area shown while editing the asset description
    TextArea descBox = null;

    public ItemDisplay(Document doc, CurrikiItem item) {
        super(doc, item);
        // panel.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_CENTER);
        panel.addStyleName("item-panel-display");
        initDisplay(doc);
    }

    /** Enters edit mode unless the item is already being edited. */
    public void changeToEditMode() {
        if (status != Constants.EDIT) {
            switchToEdit();
        }
    }

    /** Leaves edit mode and restores the read-only view. */
    public void cancelEditMode() {
        status = Constants.VIEW;
        initDisplay(doc);
    }

    /** Shows a text box pre-filled with the current external link. */
    protected void initEditLink() {
        XObject obj = doc.getObject(Constants.EXTERNAL_ASSET_CLASS);
        textbox = new TextBox();
        textbox.setText((String) obj.get(Constants.EXTERNAL_ASSET_LINK_PROPERTY));
        panel.add(textbox);
        textbox.setFocus(true);
    }

    /** Shows a labeled text area pre-filled with the current asset description. */
    protected void initEditDescription() {
        XObject obj = doc.getObject(Constants.ASSET_CLASS);
        String desc = (String) obj.get(Constants.ASSET_DESCRIPTION_PROPERTY);
        descBox = new TextArea();
        if (desc != null)
            descBox.setText(desc);
        Grid grid = new Grid(1, 2);
        Label label = getSubtitleLabel("asset.description");
        grid.setWidget(0, 0, label);
        grid.setWidget(0, 1, descBox);
        panel.add(grid);
    }

    /**
     * Shows the attachment upload form, pre-filled with the first existing
     * attachment's file name. On completed upload the document is reloaded
     * and the widget returns to view mode.
     */
    protected void initEditAttachment() {
        HTML uploadTitle = new HTML(Main.getTranslation("asset.archive.uploadfile") + ":");
        uploadTitle.addStyleName("attachment-upload-title");
        panel.add(uploadTitle);
        upload = new UploadWidget(doc.getUploadURL(), false);
        upload.addStyleName("attachment-upload-field");
        if (doc.getAttachments().size() > 0)
            upload.setFilename(((Attachment) doc.getAttachments().get(0)).getFilename());
        upload.addFormHandler(new FormHandler() {
            public void onSubmit(FormSubmitEvent formSubmitEvent) {
                Main.getSingleton().startLoading();
            }

            public void onSubmitComplete(FormSubmitCompleteEvent formSubmitCompleteEvent) {
                Main.getSingleton().finishLoading();
                reloadDocument();
                status = Constants.VIEW;
            }
        });
        panel.add(upload);
    }

    /**
     * Submits the upload form, if any.
     *
     * @return false when this item has no upload form (i.e. not an upload)
     */
    public boolean saveUpload() {
        if (upload == null) {
            return false; // return false to say it is not an upload
        }
        return upload.sendFile();
    }

    /**
     * Saves the edited description asynchronously; afterwards triggers the
     * upload (if present) or reloads the document and returns to view mode.
     *
     * @return always true (the save itself completes asynchronously)
     */
    public boolean saveDescription() {
        XObject assetObj = doc.getObject(Constants.ASSET_CLASS);
        String desc = descBox.getText();
        assetObj.set(Constants.ASSET_DESCRIPTION_PROPERTY, desc);
        List objs = new ArrayList();
        objs.add(assetObj);
        CurrikiService.App.getInstance().saveObjects(objs, new CurrikiAsyncCallback() {
            public void onFailure(Throwable throwable) {
                super.onFailure(throwable);
            }

            public void onSuccess(Object object) {
                super.onSuccess(object);
                descBox = null;
                // no pending upload -> we are done editing; refresh the view
                if (!saveUpload()) {
                    reloadDocument();
                    status = Constants.VIEW;
                }
            }
        });
        return true;
    }

    /**
     * Validates and saves the edited external link asynchronously.
     *
     * @return false when the entered URL is invalid, true otherwise
     */
    public boolean saveLink() {
        XObject obj = doc.getObject(Constants.EXTERNAL_ASSET_CLASS);
        String text;
        text = textbox.getText();
        if (!URLUtils.isValidUrl(text)) {
            Window.alert(Main.getTranslation("asset.invalid_url"));
            return false;
        }
        obj.set(Constants.EXTERNAL_ASSET_LINK_PROPERTY, text);
        CurrikiService.App.getInstance().saveObject(obj, new CurrikiAsyncCallback() {
            public void onFailure(Throwable throwable) {
                super.onFailure(throwable);
                // keep the edit box visible so the user can retry
                panel.add(textbox);
            }

            public void onSuccess(Object object) {
                super.onSuccess(object);
                panel.remove(0);
                reloadDocument();
                status = Constants.VIEW;
            }
        });
        return true;
    }

    protected void switchToEdit() {
        switchToEdit(false);
    }

    /**
     * Acquires the server-side document lock and, on success, shows the edit
     * controls. When the document is locked by someone else the user may
     * force-take the lock (re-entering with force=true).
     */
    protected void switchToEdit(boolean force) {
        panel.clear();
        status = Constants.EDIT;
        CurrikiService.App.getInstance().lockDocument(doc.getFullName(), force,
            new CurrikiAsyncCallback() {
                public void onFailure(Throwable caught) {
                    super.onFailure(caught);
                    status = Constants.VIEW;
                }

                public void onSuccess(Object object) {
                    super.onSuccess(object);
                    if (object != null && ((Boolean) object).booleanValue()) {
                        initEdit();
                    } else {
                        status = Constants.VIEW;
                        if (Window.confirm(Main.getTranslation("asset.asset_locked_force_edit"))) {
                            switchToEdit(true);
                        } else {
                            cancelEditMode();
                        }
                    }
                }
            });
    }

    /** Renders the asset description below the item; clicks bubble to the item. */
    protected void initDisplayDescription(Document doc) {
        Label caption = new HTML() {
            public void onBrowserEvent(Event event) {
                item.onBrowserEvent(event);
            }
        };
        XObject obj = doc.getObject(Constants.ASSET_CLASS);
        if (obj != null)
            caption.setText(obj.getViewProperty(Constants.ASSET_DESCRIPTION_PROPERTY));
        caption.setStyleName("item-description");
        // caption.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_CENTER);
        if (panel.getWidgetIndex(caption) == -1)
            panel.add(caption);
    }

    /** Builds the read-only view: a clickable link showing the item's URL. */
    public void initDisplay(Document doc) {
        panel.clear();
        link = new HTML() {
            public void onBrowserEvent(Event event) {
                item.onBrowserEvent(event);
            }
        };
        link.sinkEvents(Event.ONCLICK | Event.ONDBLCLICK);
        link.addStyleName("item-link");
        link.setText(getURL());
        panel.add(link);
        item.refreshItemInfos();
        item.switchHeaderButtonsToViewMode();
    }

    /** Returns the URL shown in the read-only view (link or attachment). */
    protected abstract String getURL();

    /** Re-renders the external-link view, replacing any leftover edit widgets. */
    public void initDisplayLink(Document doc) {
        if (textbox != null && panel.getWidgetIndex(textbox) != -1) {
            panel.remove(textbox);
            textbox = null;
        }
        if (link != null && panel.getWidgetIndex(link) != -1)
            panel.remove(link);
        link = new HTML() {
            public void onBrowserEvent(Event event) {
                item.onBrowserEvent(event);
            }
        };
        link.sinkEvents(Event.ONCLICK | Event.ONDBLCLICK);
        link.addStyleName("item-link");
        Label caption = new HTML();
        caption.setText(Main.getTranslation("asset.externallink"));
        caption.setStyleName("item-description");
        if (panel.getWidgetIndex(caption) == -1)
            panel.add(caption);
        XObject obj = doc.getObject(Constants.EXTERNAL_ASSET_CLASS);
        String text = (String) obj.get(Constants.EXTERNAL_ASSET_LINK_PROPERTY);
        // wrap very long URLs for display purposes only
        text = URLUtils.breakLinkText(text, 100);
        link.setText(text);
        if (panel.getWidgetIndex(link) == -1)
            panel.add(link);
        item.refreshItemInfos();
        item.switchHeaderButtonsToViewMode();
    }

    /** The raw external-link URL stored on the document. */
    protected String getLinkURL() {
        XObject obj = doc.getObject(Constants.EXTERNAL_ASSET_CLASS);
        return (String) obj.get(Constants.EXTERNAL_ASSET_LINK_PROPERTY);
    }

    /** Download URL of the first attachment, or "" when there is none. */
    protected String getAttURL() {
        if (doc.getAttachments().size() > 0) {
            Attachment att = (Attachment) doc.getAttachments().get(0);
            return att.getDownloadUrl();
        } else {
            return "";
        }
    }

    public void onView() {}

    public void onDocumentVersionChange() {}

    /**
     * Edit mode
     */
    protected abstract void initEdit();

    /** Small helper producing a translated, styled subtitle label. */
    protected Label getSubtitleLabel(String titleKey) {
        Label label = new HTML(Main.getTranslation(titleKey));
        label.addStyleName("curriki-subtitle");
        return label;
    }
}
package i2am.filtering.common;

import i2am.metadata.DbAdmin;

import javax.sql.DataSource;
import java.sql.*;

/**
 * Singleton gateway to the filtering-algorithm configuration tables in the
 * metadata database.
 *
 * <p>Fixes over the previous revision: JDBC resources are now method-local and
 * managed with try-with-resources (the old instance fields made this singleton
 * unsafe for concurrent use and leaked the connection when a query threw), and
 * an unknown algorithm name now fails fast with {@link IllegalArgumentException}
 * instead of opening a connection and then throwing an NPE from
 * {@code prepareStatement(null)}.
 */
public class DbAdapter {

    private final DbAdmin dbAdmin;
    private final DataSource ds;

    /** Lazily created singleton; volatile for safe double-checked locking. */
    private volatile static DbAdapter instance;

    private static final String GETBLOOMHASHFUNCTIONQUERY =
        "SELECT HASH_FUNCTION1, HASH_FUNCTION2, HASH_FUNCTION3 FROM tbl_params_bloom_filtering "
            + "WHERE F_TOPOLOGY = (SELECT IDX FROM tbl_topology WHERE TOPOLOGY_NAME = ?)";

    private DbAdapter(){
        dbAdmin = DbAdmin.getInstance();
        ds = dbAdmin.getDataSource();
    }

    /** @return the process-wide {@code DbAdapter} instance (created on first use). */
    public static DbAdapter getInstance() {
        if(instance == null) {
            synchronized(DbAdapter.class) {
                if(instance == null) {
                    instance = new DbAdapter();
                }
            }
        }
        return instance;
    }

    /**
     * Looks up the CSV column index targeted by the given algorithm for a topology.
     *
     * @param topologyName  name in {@code tbl_topology}
     * @param algorithmName one of BLOOM_FILTERING, KALMAN_FILTERING,
     *                      NR_KALMAN_FILTERING, I_KALMAN_FILTERING
     * @return the COLUMN_INDEX value, or 0 when no row matches
     *         (note: MariaDB's target index starts from 1)
     * @throws SQLException             on database access errors
     * @throws IllegalArgumentException when {@code algorithmName} is not recognized
     */
    public int getTargetIndex(String topologyName, String algorithmName) throws SQLException {
        // Only the per-algorithm parameter table differs between the queries.
        final String paramsTable;
        switch (algorithmName) {
            case "BLOOM_FILTERING":
                paramsTable = "tbl_params_bloom_filtering";
                break;
            case "KALMAN_FILTERING":
                paramsTable = "tbl_params_kalman_filtering";
                break;
            case "NR_KALMAN_FILTERING":
                paramsTable = "tbl_params_noise_recommend_kalman_filtering";
                break;
            case "I_KALMAN_FILTERING":
                paramsTable = "tbl_params_intelligent_kalman_filtering";
                break;
            default:
                throw new IllegalArgumentException("Unknown filtering algorithm: " + algorithmName);
        }
        String query = "SELECT COLUMN_INDEX FROM tbl_src_csv_schema WHERE IDX = "
            + "(SELECT F_TARGET FROM " + paramsTable + " WHERE F_TOPOLOGY = "
            + "(SELECT IDX FROM tbl_topology WHERE TOPOLOGY_NAME = ?))";

        int targetIndex = 0;
        try (Connection connection = ds.getConnection();
             PreparedStatement preparedStatement = connection.prepareStatement(query)) {
            preparedStatement.setString(1, topologyName);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                if (resultSet.next()) {
                    targetIndex = resultSet.getInt("COLUMN_INDEX");
                }
            }
        }
        return targetIndex;
    }

    /**
     * Fetches the three Bloom-filter hash function names configured for a topology.
     *
     * @param topologyName name in {@code tbl_topology}
     * @return array of length 3; entries remain {@code null} when no row matches
     * @throws SQLException on database access errors
     */
    public String[] getBloomHashFunction(String topologyName) throws SQLException {
        String[] hashFunctions = new String[3];
        try (Connection connection = ds.getConnection();
             PreparedStatement preparedStatement = connection.prepareStatement(GETBLOOMHASHFUNCTIONQUERY)) {
            preparedStatement.setString(1, topologyName);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                if (resultSet.next()) {
                    hashFunctions[0] = resultSet.getString("HASH_FUNCTION1");
                    hashFunctions[1] = resultSet.getString("HASH_FUNCTION2");
                    hashFunctions[2] = resultSet.getString("HASH_FUNCTION3");
                }
            }
        }
        return hashFunctions;
    }
}
package com.inepex.ineom.shared.assistedobject;

import java.util.List;

import com.inepex.ineom.shared.AssistedObjectHandlerFactory;
import com.inepex.ineom.shared.IFConsts;
import com.inepex.ineom.shared.IneList;
import com.inepex.ineom.shared.IneT;
import com.inepex.ineom.shared.Relation;
import com.inepex.ineom.shared.descriptor.ObjectDesc;

/**
 * Type-checked facade for manipulating a KVO's ({@link AssistedObject}'s) fields.
 *
 * <p>Every checked accessor first validates the key against the object descriptor
 * (via the {@code checker}) and then delegates to the wrapped {@link AssistedObject};
 * the {@code *Unchecked} variants skip validation and delegate directly.
 *
 * <p>It is deliberately not a superclass of AssistedObject (to avoid mistypes)
 * but exposes the same methods.
 *
 * <p>See {@link AssistedObjectHandlerFactory}'s java doc too.
 */
public class AssistedObjectChecker {

	// The wrapped key-value object all accessors delegate to.
	private final AssistedObject assistedObject;
	public final String descriptorName;
	public final ObjectDesc objectDescriptor;
	// Validates key/type pairs; real checker when a descriptor is supplied,
	// dummy (no-op) checker for the protected constructor path.
	private final DescriptorChecker checker;

	/**
	 * Wraps an existing object with full descriptor-based key/type checking.
	 *
	 * @param assistedObject   the KVO to wrap
	 * @param descriptorName   name of the descriptor
	 * @param objectDescriptor descriptor used to validate keys and types
	 */
	public AssistedObjectChecker(
			AssistedObject assistedObject,
			String descriptorName,
			ObjectDesc objectDescriptor) {
		this.assistedObject = assistedObject;
		this.descriptorName = descriptorName;
		this.objectDescriptor = objectDescriptor;
		this.checker = new RealDescriptorChecker(objectDescriptor, descriptorName);
	}

	/**
	 * Creates a fresh, empty KVO with no descriptor checking.
	 * NOTE(review): {@code objectDescriptor} stays null on this path, so
	 * {@link #getValueAsString(String)} must not be used on such instances.
	 *
	 * @param descriptorName name of the descriptor
	 */
	protected AssistedObjectChecker(String descriptorName) {
		this.assistedObject = new KeyValueObject(descriptorName);
		this.descriptorName = descriptorName;
		this.objectDescriptor = null;
		this.checker = new DummyDescriptorChecker();
	}

	/** @return the wrapped KVO. */
	public AssistedObject getAssistedObject() {
		return assistedObject;
	}

	// setter, getter and contain methods
	// Checked setters: validate the key's declared type, then delegate.

	public void set(String key, Boolean value) {
		checker.checkDescriptorCheckKey(key, IneT.BOOLEAN);
		assistedObject.set(key, value);
	}

	public void set(String key, Double value) {
		checker.checkDescriptorCheckKey(key, IneT.DOUBLE);
		assistedObject.set(key, value);
	}

	public void set(String key, IneList value) {
		checker.checkDescriptorCheckKey(key, IneT.LIST);
		assistedObject.set(key, value);
	}

	public void set(String key, Long value) {
		checker.checkDescriptorCheckKey(key, IneT.LONG);
		assistedObject.set(key, value);
	}

	public void set(String key, Relation value) {
		checker.checkDescriptorCheckKey(key, IneT.RELATION);
		assistedObject.set(key, value);
	}

	public void set(String key, String value) {
		checker.checkDescriptorCheckKey(key, IneT.STRING);
		assistedObject.set(key, value);
	}

	/** Removes a STRING field after validating the key. */
	public void unSetString(String key) {
		checker.checkDescriptorCheckKey(key, IneT.STRING);
		assistedObject.unsetField(key);
	}

	/** Removes a LONG field after validating the key. */
	public void unSetLong(String key) {
		checker.checkDescriptorCheckKey(key, IneT.LONG);
		assistedObject.unsetField(key);
	}

	// Checked getters and their unchecked counterparts.

	public Boolean getBoolean(String key) {
		checker.checkDescriptorCheckKey(key, IneT.BOOLEAN);
		return assistedObject.getBoolean(key);
	}

	public Boolean getBooleanUnchecked(String key) {
		return assistedObject.getBoolean(key);
	}

	public Double getDouble(String key) {
		checker.checkDescriptorCheckKey(key, IneT.DOUBLE);
		return assistedObject.getDouble(key);
	}

	public Double getDoubleUnchecked(String key) {
		return assistedObject.getDouble(key);
	}

	public IneList getList(String key) {
		checker.checkDescriptorCheckKey(key, IneT.LIST);
		return assistedObject.getList(key);
	}

	public IneList getListUnchecked(String key) {
		return assistedObject.getList(key);
	}

	public Long getLong(String key) {
		checker.checkDescriptorCheckKey(key, IneT.LONG);
		return assistedObject.getLong(key);
	}

	public Relation getRelation(String key) {
		checker.checkDescriptorCheckKey(key, IneT.RELATION);
		return assistedObject.getRelation(key);
	}

	public Relation getRelationUnchecked(String key) {
		return assistedObject.getRelation(key);
	}

	public String getString(String key) {
		checker.checkDescriptorCheckKey(key, IneT.STRING);
		return assistedObject.getString(key);
	}

	public void setUnchecked(String key, Long value) {
		assistedObject.setUnchecked(key, value);
	}

	public void setUnchecked(String key, String value) {
		assistedObject.setUnchecked(key, value);
	}

	public Long getLongUnchecked(String key) {
		return assistedObject.getLongUnchecked(key);
	}

	public String getStringUnchecked(String key) {
		return assistedObject.getStringUnchecked(key);
	}

	// Checked presence tests.

	public boolean containsString(String key) {
		checker.checkDescriptorCheckKey(key, IneT.STRING);
		return assistedObject.containsString(key);
	}

	public boolean containsBoolean(String key) {
		checker.checkDescriptorCheckKey(key, IneT.BOOLEAN);
		return assistedObject.containsBoolean(key);
	}

	public boolean containsDouble(String key) {
		checker.checkDescriptorCheckKey(key, IneT.DOUBLE);
		return assistedObject.containsDouble(key);
	}

	public boolean containsList(String key) {
		checker.checkDescriptorCheckKey(key, IneT.LIST);
		return assistedObject.containsList(key);
	}

	public boolean containsLong(String key) {
		checker.checkDescriptorCheckKey(key, IneT.LONG);
		return assistedObject.containsLong(key);
	}

	public boolean containsRelation(String key) {
		checker.checkDescriptorCheckKey(key, IneT.RELATION);
		return assistedObject.containsRelation(key);
	}

	// other methods

	public String getDescriptorName() {
		return assistedObject.getDescriptorName();
	}

	public List<String> getKeys() {
		return assistedObject.getKeys();
	}

	/**
	 * Copies all field values into another KVO of the same descriptor.
	 *
	 * @throws IllegalArgumentException when the descriptor names differ
	 */
	public void copyValuesTo(AssistedObject otherKvo) {
		if (!getDescriptorName().equals(otherKvo.getDescriptorName()))
			throw new IllegalArgumentException();
		assistedObject.copyValuesTo(otherKvo);
	}

	public Long getId() {
		return assistedObject.getId();
	}

	public void setId(Long id) {
		assistedObject.setId(id);
	}

	public boolean isNew() {
		return assistedObject.isNew();
	}

	/**
	 * Renders a field's value as a display string, switching on the field's
	 * descriptor-declared type. Booleans map to {@link IFConsts#TRUE}/
	 * {@link IFConsts#FALSE}; relations render their display name; a null
	 * value (or an UNDEFINED/unknown type) yields {@code null}.
	 * NOTE(review): requires a non-null {@code objectDescriptor} — see the
	 * protected constructor.
	 */
	public String getValueAsString(String key) {
		Object o;
		switch (objectDescriptor.getField(key).getType()) {
		case BOOLEAN:
			o = getBoolean(key);
			return o == null ? null : ((Boolean) o ? IFConsts.TRUE : IFConsts.FALSE);
		case DOUBLE:
			o = getDouble(key);
			return o == null ? null : o.toString();
		case LIST:
			o = getList(key);
			return o == null ? null : o.toString();
		case LONG:
			o = getLong(key);
			return o == null ? null : o.toString();
		case RELATION:
			Relation r = getRelation(key);
			return r == null ? null : r.getDisplayName();
		case STRING:
			o = getString(key);
			return o == null ? null : o.toString();
		case UNDEFINED:
			return null;
		default:
			return null;
		}
	}
}
package com.iterable.iterableapi; import android.app.Activity; import android.app.Dialog; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.Rect; import android.graphics.drawable.ColorDrawable; import android.net.Uri; import android.util.DisplayMetrics; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.webkit.JavascriptInterface; import android.webkit.WebResourceError; import android.webkit.WebResourceRequest; import android.webkit.WebResourceResponse; import android.webkit.WebView; import android.webkit.WebViewClient; public class IterableInAppHTMLNotification extends Dialog { static final String HTML_STRING = "html"; final String mimeType = "text/html"; final String encoding = "UTF-8"; final String resizeScript = "javascript:ITBL.resize(document.body.getBoundingClientRect().height)"; static IterableInAppHTMLNotification notification; Context context; IterableWebView webView; String htmlString; public static IterableInAppHTMLNotification instance(Context context, String htmlString) { if (notification == null) { notification = new IterableInAppHTMLNotification(context, htmlString); } //else update return notification; } public static IterableInAppHTMLNotification getInstance() { return notification; } private IterableInAppHTMLNotification(Context context, String htmlString) { super(context, android.R.style.Theme_NoTitleBar); this.context = context; this.htmlString = htmlString; } @Override protected void onStart() { super.onStart(); this.getWindow().setBackgroundDrawable(new ColorDrawable(android.graphics.Color.TRANSPARENT)); // webView = new WebView(context); webView = new IterableWebView(context); webView.createWithHtml(this, htmlString); webView.addJavascriptInterface(this, "ITBL"); setContentView(webView); } @JavascriptInterface public void resize(final float height) { Activity 
ownerActivity = getOwnerActivity(); getOwnerActivity().runOnUiThread(new Runnable() { @Override public void run() { DisplayMetrics displayMetrics = getOwnerActivity().getResources().getDisplayMetrics(); int webViewHeight = (int) displayMetrics.heightPixels; int webViewWidth = (int) displayMetrics.widthPixels; Window window = notification.getWindow(); WindowManager.LayoutParams wlp = window.getAttributes(); Rect rect = new Rect(1,2,3,4); if (true) {//bottom & top != auto) //Configurable constants float dimAmount = 0.5f; float widthPercentage = .8f; int gravity = Gravity.CENTER; //Gravity.TOP, Gravity.CENTER, Gravity.BOTTOM; int maxHeight = Math.min((int) (height * displayMetrics.scaledDensity), webViewHeight); int maxWidth = Math.min(webViewWidth, (int) (webViewWidth * widthPercentage)); window.setLayout(maxWidth, maxHeight); wlp.gravity = gravity; wlp.dimAmount = dimAmount; wlp.flags = WindowManager.LayoutParams.FLAG_DIM_BEHIND; window.setAttributes(wlp); } else { //bottom/top/left/right = 0; full screen //Is this necessary webView.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { //disables scrolling for full screen return (event.getAction() == MotionEvent.ACTION_MOVE); } }); } } }); } } class IterableWebView extends WebView { final String mimeType = "text/html"; final String encoding = "UTF-8"; IterableWebView(Context context) { super(context); } void createWithHtml(IterableInAppHTMLNotification notificationDialog, String html) { // IterableWebViewClient webViewClient = new IterableWebViewClient(); IterableWebViewClient webViewClient = new IterableWebViewClient(notificationDialog, new IterableInAppWebViewListener()); loadDataWithBaseURL("", html, mimeType, encoding, ""); setWebViewClient(webViewClient); getSettings().setJavaScriptEnabled(true); getSettings().setDomStorageEnabled(true); //don't overscroll setOverScrollMode(WebView.OVER_SCROLL_NEVER); //transparent setBackgroundColor(Color.TRANSPARENT); 
//resize: getSettings().setJavaScriptEnabled(true); // addJavascriptInterface(webViewClient, "ITBL"); } } class IterableWebViewClient extends WebViewClient { IterableInAppHTMLNotification inAppHTMLNotification; IterableInAppWebViewListener listener; IterableWebViewClient(IterableInAppHTMLNotification inAppHTMLNotification, IterableInAppWebViewListener listener) { this.inAppHTMLNotification = inAppHTMLNotification; this.listener = listener; } @Override public boolean shouldOverrideUrlLoading(WebView view, String url) { //TODO: handle the button click here System.out.println("urlClicked: "+ url); Uri uri = Uri.parse(url); String authority = uri.getAuthority(); listener.close(inAppHTMLNotification); return true; } @Override public WebResourceResponse shouldInterceptRequest (WebView view, WebResourceRequest request) { WebResourceRequest wr = request; //System.out.println("urlClicked: "+ request.getUrl().toString()); return null; } @Override public void onPageStarted (WebView view, String url, Bitmap favicon) { System.out.println("urlClicked: "+ url); view.addJavascriptInterface(inAppHTMLNotification, "ITBL"); } @Override public WebResourceResponse shouldInterceptRequest (WebView view, String url) { String wr = url; //System.out.println("urlClicked: "+ request.getUrl().toString()); return null; } @Override public void onLoadResource(WebView view, String url){ if( url.equals("http://yoururl.com") ){ // do something } } @Override public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) { System.out.println("urlClicked: "+ failingUrl); } @Override public void onReceivedError(WebView view, WebResourceRequest request, WebResourceError error) { System.out.println("urlClicked: "+ error); } @Override public void onReceivedHttpError( WebView view, WebResourceRequest request, WebResourceResponse errorResponse) { System.out.println("urlClicked: "+ errorResponse); } @Override public void onPageFinished(WebView view, String url) { 
view.loadUrl("javascript:ITBL.resize(document.body.getBoundingClientRect().height)"); //TODO: Do a check to see if a button was clicked super.onPageFinished(view, url); } } class IterableInAppWebViewListener { public void close(IterableInAppHTMLNotification inApp) { inApp.dismiss(); } }
package org.javarosa.formmanager.utility;

import org.javarosa.core.Context;

/**
 * Callback contract for actions that should run once a form entry
 * loading activity has finished.
 *
 * @author Clayton Sims
 * @date Jan 30, 2009
 */
public interface ILoadHost {

    /**
     * Notifies the host that loading is complete.
     *
     * @param context the context of the form entry activity that preceded this call
     */
    void returnFromLoading(Context context);
}
//snippet-sourcedescription:[SendMessageBatch.java demonstrates how to send batch SMS messages.]
//snippet-keyword:[AWS SDK for Java v2]
//snippet-keyword:[Amazon Pinpoint]

package com.example.pinpoint;

//snippet-start:[pinpoint.java2.sendmsg.batch.import]
import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.pinpoint.PinpointClient;
import software.amazon.awssdk.services.pinpoint.model.DirectMessageConfiguration;
import software.amazon.awssdk.services.pinpoint.model.SMSMessage;
import software.amazon.awssdk.services.pinpoint.model.AddressConfiguration;
import software.amazon.awssdk.services.pinpoint.model.ChannelType;
import software.amazon.awssdk.services.pinpoint.model.MessageRequest;
import software.amazon.awssdk.services.pinpoint.model.MessageResult;
import software.amazon.awssdk.services.pinpoint.model.SendMessagesRequest;
import software.amazon.awssdk.services.pinpoint.model.SendMessagesResponse;
import software.amazon.awssdk.services.pinpoint.model.MessageResponse;
import software.amazon.awssdk.services.pinpoint.model.PinpointException;
import java.util.HashMap;
import java.util.Map;
//snippet-end:[pinpoint.java2.sendmsg.batch.import]

/**
 * Demonstrates sending one SMS message to multiple recipients in a single
 * Amazon Pinpoint SendMessages call.
 */
public class SendMessageBatch {

    // The type of SMS message that you want to send. If you plan to send
    // time-sensitive content, specify TRANSACTIONAL. If you plan to send
    // marketing-related content, specify PROMOTIONAL.
    public static String messageType = "TRANSACTIONAL";

    // The registered keyword associated with the originating short code.
    public static String registeredKeyword = "myKeyword";

    // The sender ID to use when sending the message. Support for sender ID
    // varies by country or region. For more information, see the
    // Amazon Pinpoint User Guide.
    public static String senderId = "MySenderID";

    public static void main(String[] args) {
        final String usage = "\n" +
            "Usage: " +
            "   <message> <appId> <originationNumber> <destinationNumber> <destinationNumber1> \n\n" +
            "Where:\n" +
            "  message - The body of the message to send.\n\n"+
            "  appId - The Amazon Pinpoint project/application ID to use when you send this message.\n\n" +
            "  originationNumber - The phone number or short code that you specify has to be associated with your Amazon Pinpoint account. For best results, specify long codes in E.164 format (for example, +1-555-555-5654). "+
            "  destinationNumber - The recipient's phone number.  For best results, you should specify the phone number in E.164 format (for example, +1-555-555-5654). "+
            "  destinationNumber1 - The second recipient's phone number.  For best results, you should specify the phone number in E.164 format (for example, +1-555-555-5654). ";

        if (args.length != 5) {
            System.out.println(usage);
            System.exit(1);
        }

        String message = args[0];
        String appId = args[1];
        String originationNumber = args[2];
        String destinationNumber = args[3];
        String destinationNumber1 = args[4];
        System.out.println("Sending a message" );
        PinpointClient pinpoint = PinpointClient.builder()
            .region(Region.US_EAST_1)
            .credentialsProvider(ProfileCredentialsProvider.create())
            .build();

        sendSMSMessage(pinpoint, message, appId, originationNumber, destinationNumber, destinationNumber1);
        pinpoint.close();
    }

    //snippet-start:[pinpoint.java2.sendmsg.batch.main]
    /**
     * Sends the same SMS body to two destination numbers in one request.
     *
     * @param pinpoint           configured Pinpoint client
     * @param message            SMS body text
     * @param appId              Pinpoint project/application ID
     * @param originationNumber  sending phone number or short code
     * @param destinationNumber  first recipient (E.164 format recommended)
     * @param destinationNumber1 second recipient (E.164 format recommended)
     */
    public static void sendSMSMessage(PinpointClient pinpoint, String message, String appId,
            String originationNumber, String destinationNumber, String destinationNumber1) {

        try {
            // Fix: use the diamond operator instead of repeating the type arguments.
            Map<String, AddressConfiguration> addressMap = new HashMap<>();
            AddressConfiguration addConfig = AddressConfiguration.builder()
                .channelType(ChannelType.SMS)
                .build();

            // Add an entry to the Map object for each number to whom you want to send a message.
            addressMap.put(destinationNumber, addConfig);
            addressMap.put(destinationNumber1, addConfig);
            SMSMessage smsMessage = SMSMessage.builder()
                .body(message)
                .messageType(messageType)
                .originationNumber(originationNumber)
                .senderId(senderId)
                .keyword(registeredKeyword)
                .build();

            // Create a DirectMessageConfiguration object.
            DirectMessageConfiguration direct = DirectMessageConfiguration.builder()
                .smsMessage(smsMessage)
                .build();

            MessageRequest msgReq = MessageRequest.builder()
                .addresses(addressMap)
                .messageConfiguration(direct)
                .build();

            // Create a SendMessagesRequest object.
            SendMessagesRequest request = SendMessagesRequest.builder()
                .applicationId(appId)
                .messageRequest(msgReq)
                .build();

            SendMessagesResponse response = pinpoint.sendMessages(request);
            MessageResponse msg1 = response.messageResponse();
            // Fix: MessageResponse.result() returns Map<String, MessageResult>;
            // the previous revision used a raw Map.
            Map<String, MessageResult> map1 = msg1.result();

            // Write out the result of sendMessage.
            map1.forEach((k, v) -> System.out.println((k + ":" + v)));

        } catch (PinpointException e) {
            System.err.println(e.awsErrorDetails().errorMessage());
            System.exit(1);
        }
    }
    //snippet-end:[pinpoint.java2.sendmsg.batch.main]
}
package org.ihtsdo.otf.mapping.jpa.handlers; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.lang.reflect.InvocationTargetException; import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import org.apache.log4j.Logger; import org.ihtsdo.otf.mapping.helpers.ComplexMapRefSetMemberList; import org.ihtsdo.otf.mapping.helpers.LocalException; import org.ihtsdo.otf.mapping.helpers.MapRecordList; import org.ihtsdo.otf.mapping.helpers.MapRefsetPattern; import org.ihtsdo.otf.mapping.helpers.MapUserRole; import org.ihtsdo.otf.mapping.helpers.ProjectSpecificAlgorithmHandler; import org.ihtsdo.otf.mapping.helpers.ReportFrequency; import org.ihtsdo.otf.mapping.helpers.ReportQueryType; import org.ihtsdo.otf.mapping.helpers.ReportResultType; import org.ihtsdo.otf.mapping.helpers.SearchResult; import org.ihtsdo.otf.mapping.helpers.ValidationResult; import org.ihtsdo.otf.mapping.helpers.WorkflowStatus; import org.ihtsdo.otf.mapping.jpa.MapEntryJpa; import org.ihtsdo.otf.mapping.jpa.MapRecordJpa; import org.ihtsdo.otf.mapping.jpa.helpers.LoggerUtility; import org.ihtsdo.otf.mapping.jpa.helpers.TerminologyUtility; import org.ihtsdo.otf.mapping.jpa.services.ContentServiceJpa; import org.ihtsdo.otf.mapping.jpa.services.MappingServiceJpa; import 
org.ihtsdo.otf.mapping.jpa.services.MetadataServiceJpa; import org.ihtsdo.otf.mapping.jpa.services.ReportServiceJpa; import org.ihtsdo.otf.mapping.jpa.services.WorkflowServiceJpa; import org.ihtsdo.otf.mapping.model.MapAdvice; import org.ihtsdo.otf.mapping.model.MapEntry; import org.ihtsdo.otf.mapping.model.MapProject; import org.ihtsdo.otf.mapping.model.MapRecord; import org.ihtsdo.otf.mapping.model.MapRelation; import org.ihtsdo.otf.mapping.model.MapUser; import org.ihtsdo.otf.mapping.reports.Report; import org.ihtsdo.otf.mapping.reports.ReportDefinition; import org.ihtsdo.otf.mapping.reports.ReportDefinitionJpa; import org.ihtsdo.otf.mapping.reports.ReportJpa; import org.ihtsdo.otf.mapping.reports.ReportResult; import org.ihtsdo.otf.mapping.reports.ReportResultItem; import org.ihtsdo.otf.mapping.reports.ReportResultItemJpa; import org.ihtsdo.otf.mapping.reports.ReportResultJpa; import org.ihtsdo.otf.mapping.rf2.ComplexMapRefSetMember; import org.ihtsdo.otf.mapping.rf2.Concept; import org.ihtsdo.otf.mapping.rf2.Description; import org.ihtsdo.otf.mapping.rf2.LanguageRefSetMember; import org.ihtsdo.otf.mapping.rf2.TreePosition; import org.ihtsdo.otf.mapping.rf2.jpa.ComplexMapRefSetMemberJpa; import org.ihtsdo.otf.mapping.rf2.jpa.ConceptJpa; import org.ihtsdo.otf.mapping.rf2.jpa.SimpleMapRefSetMemberJpa; import org.ihtsdo.otf.mapping.services.ContentService; import org.ihtsdo.otf.mapping.services.MappingService; import org.ihtsdo.otf.mapping.services.MetadataService; import org.ihtsdo.otf.mapping.services.ReportService; import org.ihtsdo.otf.mapping.services.WorkflowService; import org.ihtsdo.otf.mapping.services.helpers.ConfigUtility; import org.ihtsdo.otf.mapping.services.helpers.ReleaseHandler; /** * RF2 implementation of {@link ReleaseHandler}. */ public class ReleaseHandlerJpa implements ReleaseHandler { /** The mapping service. */ private MappingService mappingService; /** The content service. */ private ContentService contentService; /** The content service. 
   * (NOTE(review): the summary above is stale — this field is the metadata
   * service, not the content service.)
   */
  private MetadataService metadataService;

  /** The release effectiveTime (formatted per {@link #dateFormat}). */
  private String effectiveTime;

  /** The RF2 module id for the release. */
  private String moduleId;

  /** The input file. */
  private String inputFile;

  /** The output dir. */
  private String outputDir;

  /** The flags for writing snapshot and delta. */
  private boolean writeSnapshot = false;

  private boolean writeActiveSnapshot = false;

  /** The write delta. */
  private boolean writeDelta = false;

  /** The map project. */
  private MapProject mapProject = null;

  /** The project-specific algorithm handler. */
  private ProjectSpecificAlgorithmHandler algorithmHandler;

  /** The map records being released. */
  private List<MapRecord> mapRecords;

  /** Map of terminology id to error messages. */
  Map<String, String> conceptErrors = new HashMap<>();

  /** Map of terminology id to map record. */
  Map<String, MapRecord> mapRecordMap = new HashMap<>();

  /** The default preferred names set (terminologyId -> dpn). */
  private Map<String, String> defaultPreferredNames = new HashMap<>();

  /** Cache of scope concepts (terminologyId -> concept). */
  private Map<String, Concept> conceptCache = new HashMap<>();

  /** The test mode flag (relaxes refset-id validation for "fake" projects). */
  private boolean testModeFlag = false;

  /** The report statistics (label -> count). */
  private Map<String, Integer> reportStatistics = new HashMap<>();

  /** The date format used for effectiveTime values. */
  final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");

  /**
   * Records that will not be PUBLISHED because they've been edited during the
   * release period.
   */
  private Set<Long> recentlyEditedRecords = new HashSet<>();

  /** Concept ids read in from the previous release file. */
  private Set<String> conceptsFromReleaseFile = new HashSet<>();

  /** The begin log. */
  private static Logger beginLog;

  /** The process log. */
  private static Logger processLog;

  /** The preview finish log. */
  private static Logger previewFinishLog;

  /** The finish log. */
  private static Logger finishLog;

  /** The current logger (switched per release phase). */
  private static Logger logger;

  /**
   * The Enum for statistics reporting. Each constant carries the human-readable
   * label written to the release report (labels are space-padded for column
   * alignment — do not trim).
   */
  private enum Stats {

    /** The active entries. */
    ACTIVE_ENTRIES("Active entries "),

    /** The concepts mapped. */
    CONCEPTS_MAPPED("Concepts mapped "),

    /** The complex maps. */
    COMPLEX_MAPS("Concepts with complex maps "),

    /** The multiple groups. */
    MULTIPLE_GROUPS("Concepts with multiple groups "),

    /** The always map. */
    ALWAYS_MAP("Concepts that always yield a target code "),

    /** The sometimes map. */
    SOMETIMES_MAP("Concepts that at least sometimes yield a target code"),

    /** The never map. */
    NEVER_MAP("Concepts that could not be mapped "),

    /** The max entries. */
    MAX_ENTRIES("Max number of map entries for a concept"),

    /** The new concepts. */
    NEW_CONCEPTS("New concepts mapped this release "),

    /** The retired concepts. */
    RETIRED_CONCEPTS("Concepts mapped retired this release "),

    /** The changed concepts. */
    CHANGED_CONCEPTS("Concept mappings changed this release ");

    /** The report label. */
    private String value;

    /**
     * Instantiates a {@link Stats} from the specified parameters.
     *
     * @param value the report label
     */
    private Stats(String value) {
      this.value = value;
    }

    /**
     * Returns the report label.
     *
     * @return the report label
     */
    public String getValue() {
      return value;
    }
  }

  /**
   * Instantiates an empty {@link ReleaseHandlerJpa}.
* * @param testModeFlag the test mode flag * @throws Exception the exception */ public ReleaseHandlerJpa(boolean testModeFlag) throws Exception { // instantiate services mappingService = new MappingServiceJpa(); contentService = new ContentServiceJpa(); metadataService = new MetadataServiceJpa(); this.testModeFlag = testModeFlag; // initialize logger - done in setMapProject() } /* see superclass */ @Override public void close() throws Exception { mappingService.close(); contentService.close(); metadataService.close(); } /* see superclass */ @Override public void processRelease() throws Exception { try { // set the logger logger = processLog; logger.info(" Starting processing the release"); // Keep track of all of the created files final List<String> createdFilenames = new ArrayList<>(); // get all map records for this project if (mapRecords == null || mapRecords.isEmpty()) { final MapRecordList mapRecordList = mappingService .getPublishedAndReadyForPublicationMapRecordsForMapProject( mapProject.getId(), null); mapRecords = mapRecordList.getMapRecords(); } // get all scope concept terminology ids for this project logger.info(" Get scope concepts for map project"); Set<String> scopeConceptTerminologyIds = new HashSet<>(); for (final SearchResult sr : mappingService .findConceptsInScope(mapProject.getId(), null).getSearchResults()) { scopeConceptTerminologyIds.add(sr.getTerminologyId()); } // Log config logger.info(" pattern = " + mapProject.getMapRefsetPattern().toString()); logger.info(" rule-based = " + mapProject.isRuleBased()); logger.info(" record count = " + mapRecords.size()); // check that either/both snapshot and delta files have been specified if (!writeSnapshot && !writeDelta) { throw new Exception( "processRelease called with both snapshot and delta flags disabled"); } // Check preconditions // check for supported ref set pattern if (!EnumSet .of(MapRefsetPattern.ComplexMap, MapRefsetPattern.ExtendedMap, MapRefsetPattern.SimpleMap) 
.contains(mapProject.getMapRefsetPattern())) { throw new Exception("Unsupported map refset pattern - " + mapProject.getMapRefsetPattern()); } // check that effectiveTime and moduleId have been properly specified if (effectiveTime == null || effectiveTime.isEmpty()) { throw new Exception("Effective time must be specified"); } // check module id if (moduleId == null || moduleId.isEmpty()) { throw new LocalException("Module id must be specified"); } boolean moduleFound = false; if (metadataService.getModules(mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()).containsKey(moduleId)) { moduleFound = true; } // Also check the destination terminology (this is to handle bidirectional // maps that are both controlled by a single module id concept if (metadataService .getModules(mapProject.getDestinationTerminology(), mapProject.getDestinationTerminologyVersion()) .containsKey(moduleId)) { moduleFound = true; } // In the edge-case where the correct module Id is not associated with a // valid concept, allow "REMOVE_THIS*" as a module id, and strip the // REMOVE_THIS from the moduleId later on the process. 
if (!moduleFound && !moduleId.startsWith("REMOVE_THIS")) { throw new LocalException( "Module id is not a valid module id " + moduleId); } if(moduleId.startsWith("REMOVE_THIS")){ moduleId = moduleId.replace("REMOVE_THIS",""); } // Refset id against pattern if (EnumSet.of(MapRefsetPattern.ComplexMap, MapRefsetPattern.ExtendedMap) .contains(mapProject.getMapRefsetPattern())) { if (!metadataService .getComplexMapRefSets(mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()) .containsKey(mapProject.getRefSetId())) { // really, this is to support "fake" map projects if (!testModeFlag) { throw new LocalException( "Map project refset id is not a valid complex map refset id " + mapProject.getRefSetId()); } } } else if (EnumSet.of(MapRefsetPattern.SimpleMap) .contains(mapProject.getMapRefsetPattern())) { if (!metadataService .getSimpleMapRefSets(mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()) .containsKey(mapProject.getRefSetId())) { // really, this is to support "fake" map projects if (!testModeFlag) { throw new Exception( "Map project refset id is not a valid simple map refset id " + mapProject.getRefSetId()); } } } // check output directory exists File outputDirFile = new File(outputDir); if (!outputDirFile.isDirectory()) throw new Exception( "Output file directory (" + outputDir + ") could not be found."); // Compute default preferred names logger.info(" Compute default preferred names"); computeDefaultPreferredNames(); // instantiate the project specific handler algorithmHandler = mappingService.getProjectSpecificAlgorithmHandler(mapProject); // Write module dependency file Set<String> moduleDependencies = algorithmHandler.getDependentModules(); if (moduleDependencies.size() > 0) { createdFilenames.add(writeModuleDependencyFile(moduleDependencies, algorithmHandler.getModuleDependencyRefSetId())); } // Prepare data // put all map records into the map record map for (final MapRecord mr : mapRecords) { if (mr == null) { 
throw new Exception("Null record found in published list"); } // Skip out of scope records if (!scopeConceptTerminologyIds.contains(mr.getConceptId())) { continue; } mapRecordMap.put(mr.getConceptId(), mr); } // create a list from the set and sort by concept id logger.info(" Sorting records"); Collections.sort(mapRecords, new Comparator<MapRecord>() { @Override public int compare(MapRecord o1, MapRecord o2) { Long conceptId1 = Long.parseLong(o1.getConceptId()); Long conceptId2 = Long.parseLong(o2.getConceptId()); return conceptId1.compareTo(conceptId2); } }); // Get maps // NOTE for simple or complex case, we get complex map records // and write the appropriate level of detail logger.info(" Retrieving maps"); // retrieve the complex map ref set members for this project's refset id // This also handles simple members ComplexMapRefSetMemberList prevMemberList = contentService .getComplexMapRefSetMembersForRefSetId(mapProject.getRefSetId()); // construct map of existing complex ref set members by UUID fields // this is used for comparison purposes later // after record processing, the remaining ref set members // represent those entries that are now inactive Map<String, ComplexMapRefSetMember> prevMembersHashMap = new HashMap<>(); int simpleBlankTargetCt = 0; for (final ComplexMapRefSetMember member : prevMemberList .getComplexMapRefSetMembers()) { // Skip lines for SimpleMap where the map target is empty // These are just placeholders for managing scope // NOTE: if there is a need to have a simple map with blank targets // this could be coded in some other way, like "NOCODE" instead of // blank if (mapProject.getMapRefsetPattern() == MapRefsetPattern.SimpleMap && member.getMapTarget().isEmpty()) { simpleBlankTargetCt++; continue; } prevMembersHashMap.put(getHash(member), member); } // output size of each collection logger.info(" Cached distinct UUID-quintuples = " + prevMembersHashMap.keySet().size()); logger.info(" Existing complex ref set members for project = " + 
prevMemberList.getCount()); // if sizes do not match, output warning if (mapProject.getMapRefsetPattern() != MapRefsetPattern.SimpleMap && prevMembersHashMap.keySet().size() != prevMemberList.getCount()) { throw new Exception( "UUID-quintuples count does not match refset member count"); } if (mapProject.getMapRefsetPattern() == MapRefsetPattern.SimpleMap && (prevMembersHashMap.keySet().size() + simpleBlankTargetCt) != prevMemberList.getCount()) { throw new Exception( "UUID-quintuples count does not match refset member count for SimpleMap"); } // clear the ref set members list (no longer used) prevMemberList = null; // Perform the release // Prep map relation to use for up propagated records final MapRelation ifaRuleRelation = algorithmHandler.getDefaultUpPropagatedMapRelation(); if (mapProject.isPropagatedFlag() && ifaRuleRelation == null) { throw new Exception( "Unable to find default map relation for up propagated records"); } logger.info(" Processing release"); // cycle over the map records marked for publishing int ct = 0; final Map<String, ComplexMapRefSetMember> activeMembersMap = new HashMap<>(); for (final MapRecord mapRecord : mapRecords) { // Skip out of scope records if (!scopeConceptTerminologyIds.contains(mapRecord.getConceptId())) { continue; } logger.info(" Processing record for " + mapRecord.getConceptId()); ct++; // If map record is inactive, skip if (!contentService.getConcept(mapRecord.getConceptId(), mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()).isActive()) { logger.info( " Skipping inactive concept " + mapRecord.getConceptId()); continue; } if (ct % 5000 == 0) { logger.info(" count = " + ct); } // instantiate map of entries by group // this is the object containing entries to write final Map<Integer, List<MapEntry>> entriesByGroup = new HashMap<>(); // Check for up-propagation if (mapProject.isPropagatedFlag() && contentService.getDescendantConceptsCount( mapRecord.getConceptId(), 
mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()) < mapProject .getPropagationDescendantThreshold()) { // Handle up propagation for this record if (!handleUpPropagation(mapRecord, entriesByGroup, ifaRuleRelation)) { // handle cases that cannot be up propagated continue; } } else { logger.debug(" DO NOT up propagate " + mapRecord.getConceptId()); } // Add the original (non-propagated) entries logger.debug(" Adding original entries"); for (MapEntry me : mapRecord.getMapEntries()) { logger.debug(" Adding entry " + me.getId()); List<MapEntry> existingEntries = entriesByGroup.get(me.getMapGroup()); if (existingEntries == null) existingEntries = new ArrayList<>(); // create a new managed instance for this entry // necessary because an up-propagated record might attempt to access // the original entry -- thus do not want to modify it MapEntry newEntry = new MapEntryJpa(); newEntry.setMapAdvices(me.getMapAdvices()); newEntry.setMapGroup(me.getMapGroup()); newEntry.setMapBlock(me.getMapBlock()); newEntry.setMapRecord(mapRecord); newEntry.setRule(mapProject.isRuleBased() ? 
me.getRule() : ""); newEntry.setTargetId(me.getTargetId()); newEntry.setTargetName(me.getTargetName()); // if not the first entry and contains TRUE rule, set to // OTHERWISE TRUE if (mapProject.isRuleBased() && existingEntries.size() > 0 && newEntry.getRule().equals("TRUE")) newEntry.setRule("OTHERWISE TRUE"); // recalculate the map relation newEntry.setMapRelation( algorithmHandler.computeMapRelation(mapRecord, me)); // add to the existing entries list existingEntries.add(newEntry); // replace the previous list with the new list entriesByGroup.put(newEntry.getMapGroup(), existingEntries); } // Check each group capped with TRUE or OTHERWISE TRUE // only perform if project is rule based if (mapProject.isRuleBased()) { for (int mapGroup : entriesByGroup.keySet()) { List<MapEntry> existingEntries = entriesByGroup.get(mapGroup); // if no entries or last entry is not true if (existingEntries.size() == 0 || !existingEntries .get(existingEntries.size() - 1).getRule().contains("TRUE")) { // create a new map entry MapEntry newEntry = new MapEntryJpa(); // set the record and group newEntry.setMapRecord(mapRecord); newEntry.setMapGroup(mapGroup); // set the rule to TRUE if no entries, OTHERWISE true if // entries exist if (existingEntries.size() == 0) newEntry.setRule("TRUE"); else newEntry.setRule("OTHERWISE TRUE"); // compute the map relation for no target for this // project newEntry.setMapRelation( algorithmHandler.computeMapRelation(mapRecord, newEntry)); // add the entry and replace in the entries-by-group map existingEntries.add(newEntry); entriesByGroup.put(mapGroup, existingEntries); } } } // Convert the record to complex map ref set members // get the concept Concept concept = conceptCache.get(mapRecord.getConceptId()); if (concept == null) { throw new Exception("Map record exists for nonexistent concept: " + mapRecord.getConceptId()); } if (!concept.isActive()) { throw new Exception("Map record exists for inactive concept: " + mapRecord.getConceptId()); } // cycle 
over groups and entries in sequence // Collect active only entries for (int mapGroup : entriesByGroup.keySet()) { int mapPriority = 1; for (final MapEntry mapEntry : entriesByGroup.get(mapGroup)) { // convert this map entry into a complex map ref set member ComplexMapRefSetMember member = getComplexMapRefSetMemberForMapEntry(mapEntry, mapRecord, mapProject, concept); if (mapProject .getMapRefsetPattern() == MapRefsetPattern.SimpleMap) { // Run member through simple/complex conversion // This makes sure what was read from the database // matches for non-simple fields what was generated in // getComplexMapRefSetMemberForMapEntry member = new ComplexMapRefSetMemberJpa( new SimpleMapRefSetMemberJpa(member)); } final String uuidStr = getHash(member); // attempt to retrieve any existing complex map ref set // member final ComplexMapRefSetMember prevMember = prevMembersHashMap.get(uuidStr); // if existing found, re-use uuid, otherwise generate new if (prevMember == null) { member.setTerminologyId( ConfigUtility.getReleaseUuid(uuidStr).toString()); } else { member.setTerminologyId(prevMember.getTerminologyId()); } // assign and increment map priority member.setMapPriority(mapPriority++); // add this entry to the list of members to write if (activeMembersMap.containsKey(member.getTerminologyId())) { logger.error( activeMembersMap.get(member.getTerminologyId()).toString()); logger.error(member.toString()); throw new Exception("Duplicate id found"); } ValidationResult result = null; result = algorithmHandler.validateForRelease(member); if (result != null && !result.isValid()) { // LEt it pass if in test mode if (testModeFlag) { logger.info(" WARNING: invalid map entry: " + member); logger.info(" errors = " + result.getErrors()); // continue; } else { throw new Exception("Invalid member for " + member.getConcept().getTerminologyId() + " - " + result); } } // Skip lines for SimpleMap where the map target is empty // These are just placeholders for managing scope // NOTE: if 
there is a need to have a simple map with blank targets // this could be coded in some other way, like "NOCODE" instead of // blank if (mapProject.getMapRefsetPattern() == MapRefsetPattern.SimpleMap && member.getMapTarget().isEmpty()) { // do not add it } // else, do else { activeMembersMap.put(member.getTerminologyId(), member); } } } // clear the service -- memory management contentService.clear(); } // Prepare for file write // declare maps in use for computation Map<String, ComplexMapRefSetMember> prevActiveMembersMap = new HashMap<>(); Map<String, ComplexMapRefSetMember> prevInactiveMembersMap = new HashMap<>(); // First, construct set of previously active complex map ref set members for (final ComplexMapRefSetMember member : prevMembersHashMap.values()) { if (member.isActive()) { prevActiveMembersMap.put(member.getTerminologyId(), member); } else { prevInactiveMembersMap.put(member.getTerminologyId(), member); } } logger.info(" prev inactive members = " + prevInactiveMembersMap.size()); logger.info(" prev active members = " + prevActiveMembersMap.size()); logger.info(" active members = " + activeMembersMap.size()); // Write human readable file createdFilenames.add(writeHumanReadableFile(activeMembersMap)); // Write active snapshot file if (writeActiveSnapshot) { writeActiveSnapshotFile(activeMembersMap); } // Write snapshot file if (writeSnapshot) { createdFilenames.add(writeActiveSnapshotFile(activeMembersMap)); createdFilenames.add(writeSnapshotFile(prevInactiveMembersMap, prevActiveMembersMap, activeMembersMap)); } // Write delta file if (writeDelta) { createdFilenames .add(writeDeltaFile(activeMembersMap, prevActiveMembersMap)); } // Write statistics createdFilenames .add(writeStatsFile(activeMembersMap, prevActiveMembersMap)); // Zip up the created files, and datestamp it. 
// report process if (!outputDir.contains("current")) { Date date = new Date(); SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HHmmss"); File outputFile = new File(outputDir + "/" + mapProject.getSourceTerminology() + "_to_" + mapProject.getDestinationTerminology() + "_" + mapProject.getRefSetId() + "_" + dateFormat.format(date) + ".zip"); zipFiles(createdFilenames, outputFile); } // write the concept errors logger.info("Concept errors (" + conceptErrors.keySet().size() + ")"); for (final String terminologyId : conceptErrors.keySet()) { logger.info( " " + terminologyId + ": " + conceptErrors.get(terminologyId)); } logger.info(" Done processing the release"); // Clean up // close the services contentService.close(); mappingService.close(); } catch (Exception e) { logger.error(e.getMessage(), e); throw new Exception(e); } } /** * Zip files. * * @param createdFilenames the created filenames * @param outputFile the output file */ private void zipFiles(List<String> createdFilenames, File outputFile) { FileOutputStream fos = null; ZipOutputStream zipOut = null; FileInputStream fis = null; try { fos = new FileOutputStream(outputFile); zipOut = new ZipOutputStream(new BufferedOutputStream(fos)); for (String filePath : createdFilenames) { File input = new File(filePath); fis = new FileInputStream(input); ZipEntry ze = new ZipEntry(input.getName()); System.out.println("Zipping the file: " + input.getName()); zipOut.putNextEntry(ze); byte[] tmp = new byte[4 * 1024]; int size = 0; while ((size = fis.read(tmp)) != -1) { zipOut.write(tmp, 0, size); } zipOut.flush(); fis.close(); } zipOut.close(); System.out.println("Done... Zipped the files..."); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { try { if (fos != null) fos.close(); } catch (Exception ex) { } } } /** * Handle up propagation. 
* * @param mapRecord the map record * @param entriesByGroup the entries by group * @param ifaRuleRelation the ifa rule relation * @return true, if successful * @throws Exception the exception */ private boolean handleUpPropagation(MapRecord mapRecord, Map<Integer, List<MapEntry>> entriesByGroup, MapRelation ifaRuleRelation) throws Exception { // Get the tree positions for this concept TreePosition treePosition = null; try { // get any tree position for this concept treePosition = contentService.getAnyTreePositionWithDescendants( mapRecord.getConceptId(), mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()); if (treePosition != null) { logger.debug(" Tree position: " + treePosition.getAncestorPath() + " - " + mapRecord.getConceptId()); } } catch (Exception e) { throw new Exception( "Error getting tree position for " + mapRecord.getConceptId()); } // check if tree positions were successfully retrieved if (treePosition == null) { throw new Exception("Could not retrieve any tree position for " + mapRecord.getConceptId()); } // get a list of tree positions sorted by position in hierarchy // (deepest-first) // NOTE: This list will contain the top-level/root map record List<TreePosition> treePositionDescendantList = getSortedTreePositionDescendantList(treePosition); // Process up-propagated entries // set of already processed concepts (may be multiple routes) Set<String> descendantsProcessed = new HashSet<>(); // cycle over the tree positions again and add entries // note that the tree positions are in reverse order of // hierarchy depth for (final TreePosition tp : treePositionDescendantList) { // avoid re-rendering nodes already rendered if (!descendantsProcessed.contains(tp.getTerminologyId())) { logger.debug(" Processing descendant " + tp.getTerminologyId()); // add this descendant to the processed list descendantsProcessed.add(tp.getTerminologyId()); // skip the root level record, these entries are added // below, after the up-propagated 
entries if (!tp.getTerminologyId().equals(mapRecord.getConceptId())) { // get the parent map record for this tree position // used to check if entries are duplicated on parent String parent = tp.getAncestorPath() .substring(tp.getAncestorPath().lastIndexOf("~") + 1); MapRecord mrParent = getMapRecordForTerminologyId(parent); // get the map record corresponding to this specific // ancestor path + concept Id MapRecord mr = getMapRecordForTerminologyId(tp.getTerminologyId()); if (mr != null) { logger.debug(" Adding entries from map record " + mr.getId() + ", " + mr.getConceptId() + ", " + mr.getConceptName()); // cycle over the entries // TODO: this should actually compare entire groups and not just // entries // to account for embedded age/gender rules. Otherwise a partial // group could // be explicitly rendered and the logic would be wrong // Thus if all the entries for a group match the parent, then none // need to be rendered, otherwise all do. for (final MapEntry me : mr.getMapEntries()) { // get the current list of entries for this group List<MapEntry> existingEntries = entriesByGroup.get(me.getMapGroup()); if (existingEntries == null) { existingEntries = new ArrayList<>(); } // flag for whether this entry is a duplicate of // an existing or parent entry boolean isDuplicateEntry = false; // compare to the entries on the parent record to the current // entry // If a match is found, this entry is duplicated and does not // need an explicit entry // (this produces short-form) // NOTE: This uses unmodified rules if (mrParent != null) { for (final MapEntry parentEntry : mrParent.getMapEntries()) { if (parentEntry.getMapGroup() == me.getMapGroup() && parentEntry.isEquivalent(me)) { isDuplicateEntry = true; break; } } } // if not a duplicate entry, add it to the map if (!isDuplicateEntry) { logger.debug(" Entry is not a duplicate of parent"); logger.debug(" entry = " + me); // create new map entry to prevent // hibernate-managed entity modification (leave id unset) 
MapEntry newEntry = new MapEntryJpa(); newEntry.setMapAdvices(me.getMapAdvices()); newEntry.setMapGroup(me.getMapGroup()); newEntry.setMapBlock(me.getMapBlock()); newEntry.setMapRecord(mr); newEntry.setRule(me.getRule()); // no-op for // non-rule-based // projects newEntry.setTargetId(me.getTargetId()); newEntry.setTargetName(me.getTargetName()); // set the propagated rule for this entry if (mapProject.isRuleBased()) { newEntry = setPropagatedRuleForMapEntry(newEntry); } // use the map relation // MAP OF SOURCE CONCEPT IS CONTEXT DEPENDENT | 447639009 // except where target code is NC if (newEntry.getTargetId() == null || newEntry.getTargetId().isEmpty()) { newEntry.setMapRelation(me.getMapRelation()); } else { newEntry.setMapRelation(ifaRuleRelation); } // add to the list existingEntries.add(newEntry); // replace existing list with modified list - unnecessary entriesByGroup.put(newEntry.getMapGroup(), existingEntries); } else { logger.debug(" Entry IS DUPLICATE of parent, do not write"); logger.debug(" entry = " + me); } } } else { // do nothing: no map record for this descendant could be found // likely this is a scope excludes condition } } } } return true; } /** * Write module dependency file. 
* * @param moduleDependencies the module dependencies * @param refSetId the ref set id * @throws Exception the exception */ private String writeModuleDependencyFile(Set<String> moduleDependencies, String refSetId) throws Exception { logger.info(" Write module dependency file"); logger.info(" count = " + moduleDependencies.size()); // Open file String filename = null; BufferedWriter writer = null; filename = outputDir + "/der2_ssRefset_ModuleDependencyDelta_" + algorithmHandler.getReleaseFile3rdElement() + "_" + effectiveTime + ".txt"; writer = new BufferedWriter(new FileWriter(filename)); // Write header writer.write( "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\tsourceEffectiveTime\ttargetEffectiveTime" + "\r\n"); // Write lines for (final String module : moduleDependencies) { String moduleStr = ConfigUtility.getUuidForString(moduleId + refSetId + module) .toString() + "\t" + effectiveTime + "\t" + "1" + "\t" + moduleId + "\t" + refSetId + "\t" + module + "\t" + effectiveTime + "\t" + effectiveTime + "\r\n"; writer.write(moduleStr); } // Close writer.flush(); writer.close(); return filename; } /** * Write delta. 
* * @param activeMembers the active members * @param prevActiveMembers the previous active members * @throws Exception the exception */ private String writeDeltaFile( Map<String, ComplexMapRefSetMember> activeMembers, Map<String, ComplexMapRefSetMember> prevActiveMembers) throws Exception { // Open file and writer String filename = null; BufferedWriter writer = null; String pattern = getPatternForType(mapProject); filename = outputDir + "/der2_" + pattern + mapProject.getMapRefsetPattern() + "Delta_" + algorithmHandler.getReleaseFile3rdElement() + "_" + effectiveTime + ".txt"; logger.info(" delta: " + filename); // Write headers (subject to pattern) // writer = new BufferedWriter(new FileWriter(filename)); writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(filename), StandardCharsets.UTF_8)); writer.write(getHeader(mapProject)); writer.write("\r\n"); // Compute retired, new, and changed.. discard unchanged for delta Map<String, ComplexMapRefSetMember> tmpActiveMembers = new HashMap<>(activeMembers); logger.info(" Computing delta entries"); // cycle over all previously active members for (final ComplexMapRefSetMember member : prevActiveMembers.values()) { // if set to write contains this previously active uuid if (tmpActiveMembers.containsKey(member.getTerminologyId())) { // if this previously active member is present (equality check) in the // set to be written if (member.equals(tmpActiveMembers.get(member.getTerminologyId()))) { // remove this concept from the set to be written -- unchanged tmpActiveMembers.remove(member.getTerminologyId()); } else { // do nothing -- modified, write it } } else { // do nothing -- new, write it } } // write new or modified maps to file // no sorting needed here for (final ComplexMapRefSetMember c : tmpActiveMembers.values()) { writer.write(getOutputLine(c, false)); } logger.info(" Writing complete."); // case 2: previously active no longer present // Copy previously active map of uuids to write into temp map 
// For each uuid in current write set // - check temp map for this uuid // - if present, remove from temp map // Inactivate all remaining uuids in the temp map tmpActiveMembers = new HashMap<>(prevActiveMembers); for (final String uuid : activeMembers.keySet()) { if (tmpActiveMembers.containsKey(uuid)) { tmpActiveMembers.remove(uuid); } } // set active to false and write inactivated complex maps for (final ComplexMapRefSetMember c : tmpActiveMembers.values()) { c.setActive(false); writer.write(this.getOutputLine(c, false)); // restore active c.setActive(true); } logger.info(" Writing complete."); writer.flush(); writer.close(); return filename; } /** * Write stats file. * * @param activeMembers the active members * @param prevActiveMembers the prev active members * @throws Exception the exception */ private String writeStatsFile( Map<String, ComplexMapRefSetMember> activeMembers, Map<String, ComplexMapRefSetMember> prevActiveMembers) throws Exception { // Gather stats Set<String> activeConcepts = new HashSet<>(); Map<String, Integer> entryCount = new HashMap<>(); Set<String> multipleEntryConcepts = new HashSet<>(); Set<String> multipleGroupConcepts = new HashSet<>(); Set<String> alwaysNc = new HashSet<>(); Set<String> neverNc = new HashSet<>(); Set<String> sometimesMap = new HashSet<>(); for (final ComplexMapRefSetMember member : activeMembers.values()) { String key = member.getConcept().getTerminologyId(); alwaysNc.add(key); neverNc.add(key); if (!entryCount.containsKey(key)) { entryCount.put(key, Integer.valueOf(0)); } int maxCt = entryCount.get(key).intValue() + 1; entryCount.put(key, maxCt); updateStatMax(Stats.MAX_ENTRIES.getValue(), maxCt); } for (final ComplexMapRefSetMember member : activeMembers.values()) { String key = member.getConcept().getTerminologyId(); activeConcepts.add(key); if (member.getMapPriority() > 1) { multipleEntryConcepts.add(key); } if (member.getMapGroup() > 1) { multipleGroupConcepts.add(key); } if (member.getMapTarget() == null || 
member.getMapTarget().isEmpty()) { neverNc.remove(key); } if (member.getMapTarget() != null && !member.getMapTarget().isEmpty()) { alwaysNc.remove(key); sometimesMap.add(key); } } Set<String> prevActiveConcepts = new HashSet<>(); for (final ComplexMapRefSetMember member : prevActiveMembers.values()) { prevActiveConcepts.add(member.getConcept().getTerminologyId()); } updateStatMax(Stats.ACTIVE_ENTRIES.getValue(), activeMembers.size()); updateStatMax(Stats.CONCEPTS_MAPPED.getValue(), activeConcepts.size()); updateStatMax(Stats.COMPLEX_MAPS.getValue(), multipleEntryConcepts.size()); updateStatMax(Stats.MULTIPLE_GROUPS.getValue(), multipleGroupConcepts.size()); updateStatMax(Stats.ALWAYS_MAP.getValue(), neverNc.size()); updateStatMax(Stats.SOMETIMES_MAP.getValue(), sometimesMap.size()); updateStatMax(Stats.NEVER_MAP.getValue(), alwaysNc.size()); // Determine count of retired concepts - inactive minus active int ct = 0; for (final String id : prevActiveConcepts) { if (!activeConcepts.contains(id)) { ct++; } } updateStatMax(Stats.RETIRED_CONCEPTS.getValue(), ct); // Determine count of new concepts - active minus inactive ct = 0; for (final String id : activeConcepts) { if (!prevActiveConcepts.contains(id)) { ct++; } } updateStatMax(Stats.NEW_CONCEPTS.getValue(), ct); Set<String> changedConcepts = new HashSet<>(); for (final String key : activeMembers.keySet()) { ComplexMapRefSetMember member = activeMembers.get(key); ComplexMapRefSetMember member2 = prevActiveMembers.get(key); if (member2 != null && !member.equals(member2)) { changedConcepts.add(member.getConcept().getId().toString()); } } updateStatMax(Stats.CHANGED_CONCEPTS.getValue(), changedConcepts.size()); String camelCaseName = mapProject.getDestinationTerminology().substring(0, 1) + mapProject.getDestinationTerminology().substring(1).toLowerCase(); final String filename = outputDir + "/" + camelCaseName + "stats.txt"; BufferedWriter statsWriter = new BufferedWriter(new FileWriter(filename)); List<String> 
statistics = new ArrayList<>(reportStatistics.keySet()); Collections.sort(statistics); for (final String statistic : statistics) { statsWriter .write(statistic + "\t" + reportStatistics.get(statistic) + "\r\n"); } statsWriter.close(); return filename; } /** * Write active snapshot file. * * @param members the members * @throws Exception the exception */ @SuppressWarnings("resource") private String writeActiveSnapshotFile( Map<String, ComplexMapRefSetMember> members) throws Exception { logger.info("Writing active snapshot..."); // Set pattern final String pattern = getPatternForType(mapProject); String filename = null; BufferedWriter writer = null; filename = outputDir + "/der2_" + pattern + mapProject.getMapRefsetPattern() + "ActiveSnapshot_" + algorithmHandler.getReleaseFile3rdElement() + "_" + effectiveTime + ".txt"; // write headers logger.info(" active snapshot: " + filename); // writer = new BufferedWriter(new FileWriter(filename)); writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(filename), StandardCharsets.UTF_8)); writer.write(getHeader(mapProject)); writer.write("\r\n"); // Write members final List<String> lines = new ArrayList<>(); for (final ComplexMapRefSetMember member : members.values()) { if (!member.isActive()) { throw new Exception("Unexpected inactive member " + member); } // collect lines lines.add(getOutputLine(member, false)); } // Sort lines if not simple if (mapProject.getMapRefsetPattern() != MapRefsetPattern.SimpleMap) { Collections.sort(lines, ConfigUtility.COMPLEX_MAP_COMPARATOR); } // Write lines for (final String line : lines) { writer.write(line); } logger.info(" Writing complete."); // Close writer.flush(); writer.close(); return filename; } /** * Write snapshot file. 
* * @param prevInactiveMembers the prev inactive members * @param prevActiveMembers the prev active members * @param currentActiveMembers the current active members * @throws Exception the exception */ private String writeSnapshotFile( Map<String, ComplexMapRefSetMember> prevInactiveMembers, Map<String, ComplexMapRefSetMember> prevActiveMembers, Map<String, ComplexMapRefSetMember> currentActiveMembers) throws Exception { logger.info("Writing snapshot..."); String pattern = getPatternForType(mapProject); String filename = null; BufferedWriter writer = null; filename = outputDir + "/der2_" + pattern + mapProject.getMapRefsetPattern() + "Snapshot_" + algorithmHandler.getReleaseFile3rdElement() + "_" + effectiveTime + ".txt"; // write headers logger.info(" snapshot file: " + filename); // writer = new BufferedWriter(new FileWriter(filename)); writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(filename), StandardCharsets.UTF_8)); writer.write(getHeader(mapProject)); writer.write("\r\n"); List<String> lines = new ArrayList<>(); // Write previously inactive members that are not active now for (final String key : prevInactiveMembers.keySet()) { if (!currentActiveMembers.containsKey(key)) { // write out previous inactive line lines.add(getOutputLine(prevInactiveMembers.get(key), true)); } else { // write out the current active line lines.add(getOutputLine(currentActiveMembers.get(key), true)); } } // Write previous active members (changed, unchanged, or inactive) for (final String key : prevActiveMembers.keySet()) { if (!currentActiveMembers.containsKey(key)) { // active value is always changing here from 1 to 0, // so we should always write the previous member with an updated // effective time (e.g. 
"trueEffectiveTime" parameter is false) ComplexMapRefSetMember member = prevActiveMembers.get(key); member.setActive(false); lines.add(getOutputLine(member, false)); member.setActive(true); } else { ComplexMapRefSetMember member = currentActiveMembers.get(key); ComplexMapRefSetMember member2 = prevActiveMembers.get(key); if (member.equals(member2)) { // write with older effective time lines.add(getOutputLine(member2, true)); } else { // write with newer effective time lines.add(getOutputLine(member, true)); } } } // Write new things (things that were not in old release) for (final String key : currentActiveMembers.keySet()) { if (!prevActiveMembers.containsKey(key) && !prevInactiveMembers.containsKey(key)) { lines.add(getOutputLine(currentActiveMembers.get(key), true)); } } // Sort lines Collections.sort(lines, ConfigUtility.COMPLEX_MAP_COMPARATOR); // Write lines for (final String line : lines) { writer.write(line); } logger.info(" Writing complete."); // Close writer.flush(); writer.close(); return filename; } /** * Write human readable file. 
* * @param members the members * @throws Exception the exception */ private String writeHumanReadableFile( Map<String, ComplexMapRefSetMember> members) throws Exception { // Open file and writer String humanReadableFileName = null; BufferedWriter humanReadableWriter = null; String camelCaseName = mapProject.getDestinationTerminology().substring(0, 1) + mapProject.getDestinationTerminology().substring(1).toLowerCase(); humanReadableFileName = outputDir + "/tls_" + camelCaseName + "HumanReadableMap_" + algorithmHandler.getReleaseFile3rdElement() + "_" + effectiveTime + ".tsv"; humanReadableWriter = // new BufferedWriter(new FileWriter(humanReadableFileName)); new BufferedWriter( new OutputStreamWriter(new FileOutputStream(humanReadableFileName), StandardCharsets.UTF_8)); // Write headers (subject to pattern) MapRefsetPattern pattern = mapProject.getMapRefsetPattern(); if (pattern == MapRefsetPattern.ExtendedMap) { if (humanReadableWriter != null) { humanReadableWriter.write( "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\treferencedComponentName\tmapGroup\tmapPriority\tmapRule\tmapAdvice\tmapTarget\tmapTargetName\tcorrelationId\tmapCategoryId\tmapCategoryName\r\n"); humanReadableWriter.flush(); } } else if (pattern == MapRefsetPattern.ComplexMap) { if (humanReadableWriter != null) { humanReadableWriter.write( "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\treferencedComponentName\tmapGroup\tmapPriority\tmapRule\tmapAdvice\tmapTarget\tmapTargetName\tcorrelationId\tcorrelationValue\r\n"); humanReadableWriter.flush(); } } else if (pattern == MapRefsetPattern.SimpleMap) { if (humanReadableWriter != null) { humanReadableWriter.write( "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\treferencedComponentName\tmapTarget\tmapTargetName\r\n"); humanReadableWriter.flush(); } } // Write entries List<String> lines = new ArrayList<>(); for (final ComplexMapRefSetMember member : members.values()) { // get the map 
relation name for the human readable file MapRelation mapRelation = null; for (final MapRelation mr : mapProject.getMapRelations()) { if (mr.getTerminologyId() != null && member.getMapRelationId() != null && mr.getTerminologyId() .equals(member.getMapRelationId().toString())) { mapRelation = mr; } } // get target concept, if not null for its preferred name Concept targetConcept = null; if (member.getMapTarget() != null && !member.getMapTarget().isEmpty()) { targetConcept = contentService.getConcept(member.getMapTarget(), mapProject.getDestinationTerminology(), mapProject.getDestinationTerminologyVersion()); } // switch line on map relation style String entryLine = null; if (mapProject.getMapRefsetPattern() == MapRefsetPattern.ExtendedMap) { entryLine = member.getTerminologyId() + "\t" + effectiveTime + "\t" + (member.isActive() ? "1" : "0") + "\t" + moduleId + "\t" + member.getRefSetId() + "\t" + member.getConcept().getTerminologyId() + "\t" + member.getConcept().getDefaultPreferredName() + "\t" + member.getMapGroup() + "\t" + member.getMapPriority() + "\t" + (mapProject.isRuleBased() ? member.getMapRule() : "") + "\t" + member.getMapAdvice() + "\t" + (member.getMapTarget() == null ? "" : member.getMapTarget()) + "\t" + (targetConcept != null ? targetConcept.getDefaultPreferredName() : "") + "\t" + "447561005" + "\t" // fixed value for Extended map + member.getMapRelationId() + "\t" + (mapRelation != null ? mapRelation.getName() : "FAILED MAP RELATION"); // ComplexMap style is identical to ExtendedMap // with the exception of the terminating map relation terminology id } else if (mapProject .getMapRefsetPattern() == MapRefsetPattern.ComplexMap) { entryLine = member.getTerminologyId() // the UUID + "\t" + effectiveTime + "\t" + (member.isActive() ? 
"1" : "0") + "\t" + moduleId + "\t" + member.getRefSetId() + "\t" + member.getConcept().getTerminologyId() + "\t" + member.getConcept().getDefaultPreferredName() + "\t" + member.getMapGroup() + "\t" + member.getMapPriority() + "\t" + (mapProject.isRuleBased() ? member.getMapRule() : "") + "\t" + member.getMapAdvice() + "\t" + member.getMapTarget() + "\t" + (targetConcept != null ? targetConcept.getDefaultPreferredName() : "") + "\t" + member.getMapRelationId() + "\t" + (mapRelation != null ? mapRelation.getName() : "FAILED MAP RELATION"); } // Simple else if (mapProject.getMapRefsetPattern() == MapRefsetPattern.SimpleMap) { entryLine = member.getTerminologyId() // the UUID + "\t" + effectiveTime + "\t" + (member.isActive() ? "1" : "0") + "\t" + moduleId + "\t" + member.getRefSetId() + "\t" + member.getConcept().getTerminologyId() + "\t" + member.getConcept().getDefaultPreferredName() + "\t" + member.getMapTarget() + "\t" + (targetConcept != null ? targetConcept.getDefaultPreferredName() : ""); } entryLine += "\r\n"; lines.add(entryLine); } // Sort lines Collections.sort(lines, ConfigUtility.TSV_COMPARATOR); // Write file for (final String line : lines) { humanReadableWriter.write(line); } // Close humanReadableWriter.flush(); humanReadableWriter.close(); return humanReadableFileName; } /** * Helper function to retrieve a map record for a given tree position. If in * set, returns that map record, if not, retrieves and adds it if possible. 
* @param terminologyId the terminology id
 * @return the map record for terminology id, or null (excluded concept, or
 *         any failure while testModeFlag is set)
 * @throws Exception if no record, multiple records, or a record with an
 *           unpublishable workflow status is found (non-test mode only)
 */
private MapRecord getMapRecordForTerminologyId(String terminologyId)
  throws Exception {
  // if in cache, use cached records
  if (mapRecordMap.containsKey(terminologyId)) {
    return mapRecordMap.get(terminologyId);
  } else {
    MapRecord mapRecord = null;
    // if not in cache yet, get record(s) for this concept
    MapRecordList mapRecordList = mappingService
        .getMapRecordsForProjectAndConcept(mapProject.getId(), terminologyId);
    // check number of records retrieved for erroneous
    // states
    if (mapRecordList.getCount() == 0) {
      // if on excluded list, add to errors to output
      if (mapProject.getScopeExcludedConcepts().contains(terminologyId)) {
        // This is an acceptable condition to have and report;
        // control falls through to the final "return null" below
        conceptErrors.put(terminologyId,
            " Concept referenced, but on excluded list for project");
        // if not found, add to errors to output
      } else {
        // if it cannot be found and is not on scope excluded list
        // this is a serious error and the map file could be wrong without it.
        // If in test mode, allow this to not be the case
        if (testModeFlag) {
          return null;
        }
        throw new Exception("Unable to find map record for " + terminologyId);
      }
    } else if (mapRecordList.getCount() > 1) {
      // If in test mode, allow this to be the case
      if (testModeFlag) {
        return null;
      }
      throw new Exception("Multiple map records found for " + terminologyId);
    } else {
      mapRecord = mapRecordList.getMapRecords().iterator().next();
      // if ready for publication, add to map
      if (mapRecord.getWorkflowStatus()
          .equals(WorkflowStatus.READY_FOR_PUBLICATION)
          || mapRecord.getWorkflowStatus().equals(WorkflowStatus.PUBLISHED)) {
        // add to map record map and return it
        mapRecordMap.put(terminologyId, mapRecord);
        return mapRecord;
      } else {
        if (testModeFlag) {
          return null;
        }
        throw new Exception(
            "Invalid workflow status " + mapRecord.getWorkflowStatus()
                + " on record for " + terminologyId);
      }
    }
  }
  // reached only for the excluded-concept case reported above
  return null;
}

/**
 * Helper function to get a map key identifier for a complex map Used to
 * determine whether an entry exists in set.
 *
 * @param c the member to hash
 * @return the key string: refsetId + conceptId + group + rule + target
 */
@SuppressWarnings("static-method")
private String getHash(ComplexMapRefSetMember c) {
  return c.getRefSetId() + c.getConcept().getTerminologyId() + c.getMapGroup()
      + (c.getMapRule() == null ? "" : c.getMapRule())
      + (c.getMapTarget() == null ? "" : c.getMapTarget());
}

/**
 * Returns the hash. Must produce the same key shape as the
 * member-based overload so entries and members can be matched.
 *
 * @param entry the entry
 * @return the hash
 */
private String getHash(MapEntry entry) {
  return mapProject.getRefSetId() + entry.getMapRecord().getConceptId()
      + entry.getMapGroup() + (entry.getRule() == null ? "" : entry.getRule())
      + (entry.getTargetId() == null ? "" : entry.getTargetId());
}

/** The relations. Lazily initialized cache keyed by relation terminology id. */
private Map<Long, MapRelation> relations = null;

/** The advices. Lazily initialized cache. */
private List<MapAdvice> advices = null;

/**
 * Returns the map entry for complex map ref set member.
* * @param member the member * @return the map entry for complex map ref set member * @throws Exception the exception */ private MapEntry getMapEntryForComplexMapRefSetMember( ComplexMapRefSetMember member) throws Exception { if (relations == null) { relations = new HashMap<>(); for (MapRelation m : mappingService.getMapRelations().getMapRelations()) { relations.put(Long.valueOf(m.getTerminologyId()), m); } } if (advices == null) { advices = mappingService.getMapAdvices().getMapAdvices(); } final MapEntry entry = new MapEntryJpa(); for (MapAdvice advice : advices) { if (member.getMapAdvice() != null && member.getMapAdvice().contains(advice.getName())) { entry.addMapAdvice(advice); } } entry.setRule(member.getMapRule()); entry.setMapBlock(member.getMapBlock()); entry.setMapGroup(member.getMapGroup()); entry.setMapPriority(member.getMapPriority()); entry.setMapRelation(relations.get(member.getMapRelationId())); entry.setTargetId(member.getMapTarget()); return entry; } /** * Convert a map entry to a complex map ref set member. Does not set effective * time. 
* * @param mapEntry the map entry * @param mapRecord the map record * @param mapProject the map project * @param concept the concept * @return the complex map ref set member * @throws Exception the exception */ private ComplexMapRefSetMember getComplexMapRefSetMemberForMapEntry( MapEntry mapEntry, MapRecord mapRecord, MapProject mapProject, Concept concept) throws Exception { ComplexMapRefSetMember complexMapRefSetMember = new ComplexMapRefSetMemberJpa(); // set the base parameters // NOTE: do not set UUID here, done in main logic complexMapRefSetMember.setConcept(concept); complexMapRefSetMember.setRefSetId(mapProject.getRefSetId()); complexMapRefSetMember.setModuleId(Long.valueOf(moduleId)); complexMapRefSetMember.setActive(true); complexMapRefSetMember.setEffectiveTime(dateFormat.parse(effectiveTime)); complexMapRefSetMember.setTerminology(mapProject.getSourceTerminology()); complexMapRefSetMember .setTerminologyVersion(mapProject.getSourceTerminologyVersion()); // set parameters from the map entry complexMapRefSetMember.setMapGroup(mapEntry.getMapGroup()); complexMapRefSetMember.setMapPriority(mapEntry.getMapPriority()); complexMapRefSetMember .setMapRule(mapProject.isRuleBased() ? mapEntry.getRule() : ""); if (mapEntry.getMapRelation() != null) { complexMapRefSetMember.setMapRelationId( Long.valueOf(mapEntry.getMapRelation().getTerminologyId())); } complexMapRefSetMember.setMapTarget( mapEntry.getTargetId() == null ? "" : mapEntry.getTargetId()); /** * Set the map advice from the advices on the entry. * * First, get the human readable map advice. Second, add the attached map * advices. 
Third, add to advice based on target/relation and rule - If the * map target is blank, advice contains the map relation name - If it's an * IFA rule (gender), add MAP OF SOURCE CONCEPT IS CONTEXT DEPENDENT FOR * GENDER - If it's an IFA rule (age/upproagated), add MAP OF SOURCE CONCEPT * IS CONTEXT DEPENDENT */ // extract all advices and add to a list List<String> sortedAdvices = new ArrayList<>(); for (final MapAdvice mapAdvice : mapEntry.getMapAdvices()) { sortedAdvices.add(mapAdvice.getDetail()); } // check for context dependent advice if (mapEntry.getRule() != null && mapEntry.getRule().startsWith("IFA") && mapEntry.getTargetId() != null && !mapEntry.getTargetId().isEmpty()) { // if not a gender rule, add the advice if (!mapEntry.getRule().contains("| Male (finding) |") && !mapEntry.getRule().contains("| Female (finding) |")) { sortedAdvices.add("MAP OF SOURCE CONCEPT IS CONTEXT DEPENDENT"); // if a gender rule, add the advice } else if (mapEntry.getRule().contains("| Male (finding) |") || mapEntry.getRule().contains("| Female (finding) |")) { sortedAdvices.add("MAP IS CONTEXT DEPENDENT FOR GENDER"); } } String mapAdviceStr = getHumanReadableMapAdvice(mapEntry); // sort unique advices and add them sortedAdvices = new ArrayList<>(new HashSet<>(sortedAdvices)); Collections.sort(sortedAdvices); for (final String advice : sortedAdvices) { mapAdviceStr += (mapAdviceStr.length() != 0 ? " | " : "") + advice; } complexMapRefSetMember.setMapAdvice(mapAdviceStr); return complexMapRefSetMember; } /** * Function to construct propagated rule for an entry. * * @param mapEntry the map entry * @return the map entry */ private MapEntry setPropagatedRuleForMapEntry(MapEntry mapEntry) { MapRecord mapRecord = mapEntry.getMapRecord(); // construct propagated rule based on concept id and name // e.g. 
for TRUE rule // IFA 104831000119109 | Drug induced central sleep apnea // for age rule // IFA 104831000119109 | Drug induced central sleep apnea // (disorder) | AND IFA 445518008 | Age at onset of clinical finding // (observable entity) | <= 28.0 days // (disorder) String rule = "IFA " + mapRecord.getConceptId() + " | " + defaultPreferredNames.get(mapRecord.getConceptId()) + " |"; // if an age or gender rule, append the existing rule if (!mapEntry.getRule().contains("TRUE")) { rule += " AND " + mapEntry.getRule(); } // set the rule mapEntry.setRule(rule); /* * Logger.getLogger(getClass()) .info(" Set rule to " + rule); */ return mapEntry; } /** * Gets the human readable map advice. * * @param mapEntry the map entry * @return the human readable map advice * @throws Exception the exception */ private String getHumanReadableMapAdvice(MapEntry mapEntry) throws Exception { String advice = ""; // Construct advice only if using Extended Map pattern if (mapProject.getMapRefsetPattern().equals(MapRefsetPattern.ExtendedMap)) { logger.debug(" RULE: " + mapEntry.getRule()); String[] comparatorComponents; // used for parsing age rules // if map target is blank use map relation if (mapEntry.getTargetId() == null || mapEntry.getTargetId() == "") { return mapEntry.getMapRelation().getName(); } // Split rule on "AND IF" conditions int ct = 0; for (String part : mapEntry.getRule().toUpperCase().split(" AND IF")) { ct++; if (ct > 1) { // Put the "if" back in part = "IF" + part; // Add an AND clause advice += " AND "; } logger.debug(" PART : " + part); // if map rule is IFA (age) if (part.contains("AGE AT ONSET OF CLINICAL FINDING") || part.contains("CURRENT CHRONOLOGICAL AGE")) { // IF AGE AT ONSET OF // CLINICAL FINDING BETWEEN 1.0 YEAR AND 18.0 YEARS CHOOSE // M08.939 // Rule examples // IFA 445518008 | Age at onset of clinical finding (observable // entity) | < 65 years // IFA 445518008 | Age at onset of clinical finding (observable // entity) | <= 28.0 days // split by pipe (|) 
character. Expected fields // 0: ageConceptId // 1: Age rule type (Age at onset, Current chronological age) // 2: Comparator, Value, Units (e.g. < 65 years) String[] ruleComponents = part.split("\\|"); // add the type of age rule advice += "IF " + prepTargetName(part); comparatorComponents = ruleComponents[2].trim().split(" "); // add appropriate text based on comparator switch (comparatorComponents[0]) { case ">": advice += " AFTER"; break; case "<": advice += " BEFORE"; break; case ">=": advice += " ON OR AFTER"; break; case "<=": advice += " ON OR BEFORE"; break; default: throw new Exception( "Illgal operator: " + comparatorComponents[0]); } // add the value and units advice += " " + comparatorComponents[1] + " " + comparatorComponents[2]; } // if a gender rule (i.e. contains (FE)MALE) else if (part.contains("| MALE (FINDING)") || part.contains("| FEMALE (FINDING)")) { // add the advice based on gender if (part.contains("| FEMALE (FINDING)")) { advice += "IF FEMALE"; } else { advice += "IF MALE"; } } // if not an IFA rule (i.e. TRUE, OTHERWISE TRUE), simply return // ALWAYS else if (!part.contains("IFA")) { advice = "ALWAYS " + mapEntry.getTargetId(); } // Handle regular ifa else if (part.contains("IFA")) { String targetName = prepTargetName(part); advice += "IF " + targetName; } } // finally, add the CHOOSE {targetId} if (!advice.startsWith("ALWAYS")) { advice += " CHOOSE " + mapEntry.getTargetId(); } logger.debug(" ADVICE: " + advice); } return advice; } /** * Prep target name. * * @param rule the rule * @return the string */ @SuppressWarnings("static-method") private String prepTargetName(String rule) { String[] ifaComponents = rule.split("\\|"); // remove any (disorder), etc. String targetName = ifaComponents[1].trim(); // if classifier (e.g. 
(disorder)) present, remove it and any trailing // spaces if (targetName.lastIndexOf("(") != -1) targetName = targetName.substring(0, targetName.lastIndexOf("(")).trim(); return targetName; } /** * Takes a tree position graph and converts it to a sorted list of tree * positions where order is based on depth in tree. * * @param tp the tp * @return the sorted tree position descendant list * @throws Exception the exception */ @SuppressWarnings("static-method") private List<TreePosition> getSortedTreePositionDescendantList( TreePosition tp) throws Exception { // construct list of unprocessed tree positions and initialize with root // position List<TreePosition> positionsToAdd = new ArrayList<>(); positionsToAdd.add(tp); List<TreePosition> sortedTreePositionDescendantList = new ArrayList<>(); while (!positionsToAdd.isEmpty()) { // add the first element sortedTreePositionDescendantList.add(positionsToAdd.get(0)); // add the children of first element for (final TreePosition childTp : positionsToAdd.get(0).getChildren()) { positionsToAdd.add(childTp); } // remove the first element positionsToAdd.remove(0); } // sort the tree positions by position in the hierarchy (e.g. // characters) Collections.sort(sortedTreePositionDescendantList, new Comparator<TreePosition>() { @Override public int compare(TreePosition tp1, TreePosition tp2) { int levels1 = tp1.getAncestorPath().length() - tp1.getAncestorPath().replace("~", "").length(); int levels2 = tp1.getAncestorPath().length() - tp1.getAncestorPath().replace("~", "").length(); // if first has more ~'s than second, it is considered // LESS than the second // i.e. this is a reverse sort return levels2 - levels1; } }); return sortedTreePositionDescendantList; } /** * Returns the machine readable textfor complex map ref set member. 
* @param member the complex map ref set member
 * @param trueEffectiveTimeFlag the true effective time flag; when set, the
 *          member's own effective time is written instead of the release's
 * @return the machine readable RF2 line (CRLF-terminated), or "" for a
 *         SimpleMap member with no concept terminology (placeholder entry)
 * @throws Exception the exception
 */
private String getOutputLine(ComplexMapRefSetMember member,
  boolean trueEffectiveTimeFlag) throws Exception {
  String entryLine = "";

  // switch line on map relation style
  if (mapProject.getMapRefsetPattern() == MapRefsetPattern.ExtendedMap) {
    // NOTE(review): "100070607" appears to be a sentinel effective time
    // that forces use of the release effectiveTime -- confirm intended value
    entryLine = member.getTerminologyId() // the UUID
        + "\t"
        + ((trueEffectiveTimeFlag && !dateFormat
            .format(member.getEffectiveTime()).equals("100070607"))
                ? dateFormat.format(member.getEffectiveTime())
                : effectiveTime)
        + "\t" + (member.isActive() ? "1" : "0") + "\t" + moduleId + "\t"
        + member.getRefSetId() + "\t"
        + member.getConcept().getTerminologyId() + "\t"
        + member.getMapGroup() + "\t" + member.getMapPriority() + "\t"
        + (mapProject.isRuleBased() ? member.getMapRule() : "") + "\t"
        + member.getMapAdvice() + "\t" + member.getMapTarget() + "\t"
        + "447561005" + "\t" // fixed correlation id for Extended map
        + member.getMapRelationId();
  }
  // ComplexMap style is identical to ExtendedMap
  // with the exception of the terminating map relation terminology id
  else if (mapProject.getMapRefsetPattern() == MapRefsetPattern.ComplexMap) {
    entryLine = member.getTerminologyId() // the UUID
        + "\t"
        + ((trueEffectiveTimeFlag && !dateFormat
            .format(member.getEffectiveTime()).equals("100070607"))
                ? dateFormat.format(member.getEffectiveTime())
                : effectiveTime)
        + "\t" + (member.isActive() ? "1" : "0") + "\t" + moduleId + "\t"
        + member.getRefSetId() + "\t"
        + member.getConcept().getTerminologyId() + "\t"
        + member.getMapGroup() + "\t" + member.getMapPriority() + "\t"
        + member.getMapRule() + "\t" + member.getMapAdvice() + "\t"
        + member.getMapTarget() + "\t" + member.getMapRelationId();

    // Simple map
  } else if (mapProject.getMapRefsetPattern() == MapRefsetPattern.SimpleMap) {
    // For simple map, avoid writing entries with blank maps
    // these are placeholders to better manage scope.
    if (member.getConcept() == null
        || member.getConcept().getTerminology() == null
        || member.getConcept().getTerminology().isEmpty()) {
      return "";
    }
    entryLine = member.getTerminologyId() // the UUID
        + "\t"
        + ((trueEffectiveTimeFlag && !dateFormat
            .format(member.getEffectiveTime()).equals("100070607"))
                ? dateFormat.format(member.getEffectiveTime())
                : effectiveTime)
        + "\t" + (member.isActive() ? "1" : "0") + "\t" + moduleId + "\t"
        + member.getRefSetId() + "\t"
        + member.getConcept().getTerminologyId() + "\t"
        + member.getMapTarget();
  }
  entryLine += "\r\n";
  return entryLine;
}

/**
 * Compute default preferred names for every concept referenced by the
 * project's map records, populating conceptCache and defaultPreferredNames.
 *
 * @throws Exception if a referenced concept does not exist
 */
private void computeDefaultPreferredNames() throws Exception {

  // get the config properties for default preferred name variables
  // set the dpn variables and instantiate the concept dpn map
  Properties properties = ConfigUtility.getConfigProperties();
  String dpnTypeId =
      properties.getProperty("loader.defaultPreferredNames.typeId");
  String dpnRefSetId =
      properties.getProperty("loader.defaultPreferredNames.refSetId");
  String dpnAcceptabilityId =
      properties.getProperty("loader.defaultPreferredNames.acceptabilityId");

  // Compute preferred names
  int ct = 0;
  for (final MapRecord mapRecord : mapRecords) {
    ct++;
    Concept concept = contentService.getConcept(mapRecord.getConceptId(),
        mapProject.getSourceTerminology(),
        mapProject.getSourceTerminologyVersion());
    // Concept is referenced that is not in the release
    if (concept == null) {
      throw new Exception(
          "Unexpected concept id reference that does not exist - "
              + mapRecord.getConceptId() + ", "
              + mapProject.getSourceTerminology() + ", "
              + mapProject.getSourceTerminologyVersion());
    }
    conceptCache.put(concept.getTerminologyId(), concept);
    if (testModeFlag) {
      // test mode: trust the concept's stored preferred name
      defaultPreferredNames.put(concept.getTerminologyId(),
          concept.getDefaultPreferredName());
    } else {
      defaultPreferredNames.put(concept.getTerminologyId(),
          computeDefaultPreferredName(concept, dpnTypeId, dpnRefSetId,
              dpnAcceptabilityId));
    }
    if (ct % 5000 == 0) {
      logger.info(" count = " + ct);
    }
  }
}

/**
 * Helper function to access/add to dpn set. Selects the active description
 * matching the configured type whose language refset member matches the
 * configured refset and acceptability.
 *
 * @param concept the concept
 * @param dpnTypeId the dpn type id
 * @param dpnRefSetId the dpn ref set id
 * @param dpnAcceptabilityId the dpn acceptability id
 * @return the default preferred name
 * @throws Exception if no qualifying description is found
 */
private String computeDefaultPreferredName(Concept concept, String dpnTypeId,
  String dpnRefSetId, String dpnAcceptabilityId) throws Exception {

  if (defaultPreferredNames.containsKey(concept.getTerminologyId())) {
    return defaultPreferredNames.get(concept.getTerminologyId());
  } else {
    // cycle over descriptions
    for (final Description description : concept.getDescriptions()) {

      // if active and type id matches
      if (description.isActive()
          && description.getTypeId().equals(Long.valueOf(dpnTypeId))) {

        // cycle over language ref sets
        for (final LanguageRefSetMember language : description
            .getLanguageRefSetMembers()) {

          if (language.getRefSetId().equals(dpnRefSetId)
              && language.isActive() && language.getAcceptabilityId()
                  .equals(Long.valueOf(dpnAcceptabilityId))) {
            defaultPreferredNames.put(concept.getTerminologyId(),
                description.getTerm());
            // Report info if semantic tag cannot be found
            if (!description.getTerm().trim().endsWith(")")) {
              logger.warn("Could not find semantic tag for concept "
                  + concept.getTerminologyId() + ", name selected="
                  + description.getTerm());
              for (final Description d : concept.getDescriptions()) {
                logger.warn("Description " + d.getTerminologyId()
                    + ", active=" + d.isActive() + ", typeId = "
                    + d.getTypeId());
                for (final LanguageRefSetMember l : d
                    .getLanguageRefSetMembers()) {
                  logger.warn(" Language Refset Member "
                      + l.getTerminologyId() + ", active = " + l.isActive()
                      + ", refsetId=" + l.getRefSetId()
                      + ", acceptabilityId = " + l.getAcceptabilityId());
                }
              }
            }
            return description.getTerm();
          }
        }
      }
    }
    throw new Exception(
        "Could not retrieve default preferred name for Concept "
            + concept.getTerminologyId());
  }
}

/*
see superclass */ @Override public void beginRelease() throws Exception { try { logger = beginLog; clearLog(logger); logger.info(" Starting begin release"); // instantiate required services final MappingService mappingService = new MappingServiceJpa(); final ReportService reportService = new ReportServiceJpa(); reportService.setTransactionPerOperation(false); reportService.beginTransaction(); mappingService.setTransactionPerOperation(false); mappingService.beginTransaction(); // Check preconditions // If there are "PUBLISHED" map entries, require // either "simple" or "complex" map refset members to exist if (mappingService .findMapRecordsForQuery("mapProjectId:" + mapProject.getId() + " AND workflowStatus:PUBLISHED", null) .getSearchResults().size() > 0) { final ContentService contentService = new ContentServiceJpa(); try { if (contentService .getComplexMapRefSetMembersForRefSetId(mapProject.getRefSetId()) .getCount() == 0) { throw new LocalException( "Map has published records but no refset member entries. 
" + "Reload previous release version file into refset table"); } } catch (Exception e) { throw e; } finally { contentService.close(); } } // get the report definition logger.info(" Create release QA report"); ReportDefinition reportDefinition = null; for (final ReportDefinition rd : mapProject.getReportDefinitions()) { if (rd.getName().equals("Release QA")) reportDefinition = rd; } if (reportDefinition == null) { mappingService.close(); reportService.close(); throw new Exception( "Could not get report definition matching 'Release QA'"); } // create the report QA object and instantiate fields final Report report = new ReportJpa(); report.setActive(true); report.setAutoGenerated(false); report.setDiffReport(false); report.setMapProjectId(mapProject.getId()); report.setName(reportDefinition.getName()); report.setOwner(mappingService.getMapUser("qa")); report.setQuery("No query -- constructed by services"); report.setQueryType(ReportQueryType.NONE); report.setReportDefinition(reportDefinition); report.setResultType(ReportResultType.CONCEPT); report.setTimestamp((new Date()).getTime()); reportService.addReport(report); // get all scope concept terminology ids for this project logger.info(" Get scope concepts for map project"); final Set<String> scopeConceptTerminologyIds = new HashSet<>(); for (final SearchResult sr : mappingService .findConceptsInScope(mapProject.getId(), null).getSearchResults()) { scopeConceptTerminologyIds.add(sr.getTerminologyId()); } logger.info(" count = " + scopeConceptTerminologyIds.size()); // get all map records for this project logger.info(" Get records for map project"); final MapRecordList mapRecords = mappingService.getMapRecordsForMapProject(mapProject.getId()); logger.info(" count = " + mapRecords.getCount()); // create a temp set of scope terminology ids Set<String> conceptsWithNoRecord = new HashSet<>(scopeConceptTerminologyIds); final List<MapRecord> mapRecordsToProcess = mapRecords.getMapRecords(); // create a temp set of concept 
ids for which a map record exists // (irrespective of scope final Map<String, Integer> conceptMapRecordCountMap = new HashMap<>(); // get all mapping refset members for this project final Map<Long, List<ComplexMapRefSetMember>> refsetMemberMap = new HashMap<>(); for (ComplexMapRefSetMember member : contentService .getComplexMapRefSetMembersForRefSetId(mapProject.getRefSetId()) .getComplexMapRefSetMembers()) { List<ComplexMapRefSetMember> list = refsetMemberMap.get(member.getConcept().getId()); if (list == null) { list = new ArrayList<>(); } // Don't include up-propagated refset member entries // Skip concept exclusion rules if (member.getMapRule() != null && member.getMapRule().matches("IFA.*")) { if (member.getMapAdvice() .contains("MAP IS CONTEXT DEPENDENT FOR GENDER") && !member.getMapRule().contains("AND IFA")) { // unless simple gender rule, then keep } else if (member.getMapRule().matches( "IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>].*AND IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>].*") && !member.getMapRule().matches(".*AND IFA.*AND IFA.*")) { // unless 2-part age rule, then keep } else if (member.getMapRule() .matches("IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>].*") && !member.getMapRule().contains("AND IFA")) { // unless simple age rule without compund clause, then keep } else { // else skip Logger.getLogger(MappingServiceJpa.class) .debug(" Skipping refset member exclusion rule " + member.getTerminologyId()); continue; } } list.add(member); refsetMemberMap.put(member.getConcept().getId(), list); } // for each map record, check for errors // NOTE: Report Result names are constructed from error lists assigned // Each individual result is stored as a Report Result Item logger.info(" Validate records"); boolean errorFlag = false; int pubCt = 0; while (mapRecordsToProcess.size() != 0) { // extract the concept and remove it from list final MapRecord mapRecord = mapRecordsToProcess.get(0); mapRecordsToProcess.remove(0); logger.debug(" concept = " + mapRecord.getConceptId() + " " + 
mapRecord.getConceptName()); // first, remove this concept id from the dynamic conceptsWithNoRecord // set conceptsWithNoRecord.remove(mapRecord.getConceptId()); // instantiate or increment the number of map records for this concept // NOTE: Only for published/ready for publication if (mapRecord.getWorkflowStatus().equals(WorkflowStatus.PUBLISHED) || mapRecord.getWorkflowStatus() .equals(WorkflowStatus.READY_FOR_PUBLICATION)) { if (!conceptMapRecordCountMap.containsKey(mapRecord.getConceptId())) { conceptMapRecordCountMap.put(mapRecord.getConceptId(), 1); } else { conceptMapRecordCountMap.put(mapRecord.getConceptId(), conceptMapRecordCountMap.get(mapRecord.getConceptId()) + 1); } } // constuct a list of errors for this concept final List<String> resultMessages = new ArrayList<>(); // CHECK: One to one project record has unique mapping if (algorithmHandler.isOneToOneConstrained()) { // check for violation of target codes if (algorithmHandler.recordViolatesOneToOneConstraint(mapRecord)) { resultMessages.add(mapProject.getDestinationTerminology() + " target used more than once"); } // check for than one entry if (mapRecord.getMapEntries().size() > 1) { resultMessages.add("Map record has more than one entry"); } } // CHECK: Map record is READY_FOR_PUBLICATION or PUBLISHED if (!mapRecord.getWorkflowStatus() .equals(WorkflowStatus.READY_FOR_PUBLICATION) && !mapRecord.getWorkflowStatus() .equals(WorkflowStatus.PUBLISHED)) { resultMessages.add("Map record not marked ready for publication"); errorFlag = true; // if record is ready for publication } else { // Make sure map entries are sorted by by mapGroup/mapPriority Collections.sort(mapRecord.getMapEntries(), new TerminologyUtility.MapEntryComparator()); // CHECK: Map record (must be ready for publication) passes project // specific validation checks ValidationResult result = algorithmHandler.validateRecord(mapRecord); if (!result.isValid()) { logger.debug(" FAILED"); errorFlag = true; resultMessages.add("Map record failed 
validation check"); } else { pubCt++; } } // Check for out of scope map records if (!scopeConceptTerminologyIds.contains(mapRecord.getConceptId())) { // construct message based on whether record is to be removed String reportMsg = mapProject.getSourceTerminology() + " concept not in scope"; // separate error-type by previously-published or this-cycle-edited if (mapRecord.getWorkflowStatus().equals(WorkflowStatus.PUBLISHED)) { resultMessages.add(reportMsg + " - previously published"); } else { resultMessages.add(reportMsg + " - edited this cycle"); } } // Concept and refset integrity checks final Concept concept = contentService.getConcept( mapRecord.getConceptId(), mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()); // 1. Mapped concepts that are inactive in current SNOMED release // 2. Mapped concepts not in snomed (e.g. because of drip feed issues)// if (concept == null) { resultMessages.add( mapProject.getSourceTerminology() + " concept does not exist"); continue; } else if (!concept.isActive()) { resultMessages .add(mapProject.getSourceTerminology() + " concept inactive"); } // Check: Destination terminology codes NOT used in previous version of // the map final Set<String> unusedTargetCodes = new HashSet<>(); if (refsetMemberMap.containsKey(concept.getId())) { final List<ComplexMapRefSetMember> members = refsetMemberMap.get(concept.getId()); for (final ComplexMapRefSetMember member : members) { if (member.isActive() && member.getMapTarget() != null && !member.getMapTarget().isEmpty()) { boolean memberTargetFound = false; for (MapEntry me : mapRecord.getMapEntries()) { if (member.getMapTarget().equals(me.getTargetId())) { memberTargetFound = true; } } if (!memberTargetFound) { unusedTargetCodes.add(member.getMapTarget()); } } } if (unusedTargetCodes.size() > 0) { String str = ""; for (final String unusedTargetCode : unusedTargetCodes) { final Concept targetConcept = contentService.getConcept( unusedTargetCode, 
mapProject.getDestinationTerminology(), mapProject.getDestinationTerminologyVersion()); str += unusedTargetCode + " " + (targetConcept == null ? "Unknown name" : targetConcept.getDefaultPreferredName()) + "; "; } // truncate too-long strings (db constraint) str = str.substring(0, Math.min(255, str.length() - 2)); // add names of target codes instead of source concept default // preferred name this.addReportError(report, mapProject, concept.getTerminologyId(), str, mapProject.getDestinationTerminology() + " target code from previous release not used"); } // check: concept mapped to multiple codes (non-group-based only) // check: concept mapped to duplicate codes (non-group-based only) if (mapProject.isGroupStructure()) { final Set<String> targetIds = new HashSet<>(); for (MapEntry entry : mapRecord.getMapEntries()) { if (entry.getTargetId() != null && !entry.getTargetId().isEmpty()) { if (targetIds.contains(entry.getTargetId())) { this.addReportError(report, mapProject, mapRecord.getConceptId(), concept.getDefaultPreferredName(), "Concept mapped to duplicate " + mapProject.getDestinationTerminology() + " codes"); break; } } } if (targetIds.size() > 1) { this.addReportError(report, mapProject, mapRecord.getConceptId(), concept.getDefaultPreferredName(), "Concept mapped to multiple " + mapProject.getDestinationTerminology() + " codes"); } } } // Add all reported errors to the report for (final String error : resultMessages) { addReportError(report, mapProject, mapRecord.getConceptId(), mapRecord.getConceptName(), error); } } // add multiple map record mappings to report if present for (String conceptId : conceptMapRecordCountMap.keySet()) { if (conceptMapRecordCountMap.get(conceptId) > 1) { // get the concept Concept c = contentService.getConcept(conceptId, mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()); addReportError(report, mapProject, conceptId, c.getDefaultPreferredName(), "Concept has multiple map records"); } } // Check: Source 
terminology concepts in previous version NOT in // current version (possibly with RF2 line from previous version map as ComplexMapRefSetMemberList members = contentService .getComplexMapRefSetMembersForRefSetId(mapProject.getRefSetId()); for (ComplexMapRefSetMember member : members .getComplexMapRefSetMembers()) { Concept sourceConcept = member.getConcept(); if (member.isActive() && sourceConcept != null && sourceConcept.isActive() && !conceptMapRecordCountMap .containsKey(member.getConcept().getTerminologyId())) { this.addReportError(report, mapProject, member.getConcept().getTerminologyId(), member.getConcept().getDefaultPreferredName(), "Concept mapped in previous version no longer mapped"); } } ReportResult pubCtResult = new ReportResultJpa(); pubCtResult.setReport(report); pubCtResult.setProjectName(mapProject.getName()); pubCtResult.setValue("Ready for publication: " + pubCt); pubCtResult.setReportResultItems(null); report.addResult(pubCtResult); // CHECK: In-scope concepts with no map record logger.debug(" Report in scope concepts with no record"); for (final String terminologyId : conceptsWithNoRecord) { // get the concept Concept c = contentService.getConcept(terminologyId, mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()); addReportError(report, mapProject, terminologyId, c.getDefaultPreferredName(), "In-scope concept has no map record"); errorFlag = true; } logger.info(" Adding Release QA Report"); logger.info(" Log into the application to see the report results"); // Commit the new report either way reportService.commit(); // way to override the errors if we want to proceed with a release anyway if (!testModeFlag) { if (errorFlag) { mappingService.rollback(); mappingService.close(); reportService.close(); throw new Exception("The validation had errors, please see the log"); } else { mappingService.commit(); } } logger.info(" Done begin release"); mappingService.close(); reportService.close(); } catch (Exception e) { 
logger.error(e.getMessage(), e); throw new Exception(e); } } /** * Adds the report error. * * @param report the report * @param mapProject the map project * @param terminologyId the terminology id * @param conceptName the concept name * @param error the error */ @SuppressWarnings("static-method") private void addReportError(Report report, MapProject mapProject, String terminologyId, String conceptName, String error) { ReportResult reportResult = null; // find the report result corresponding to this error, if it exists for (final ReportResult rr : report.getResults()) { if (rr.getValue().equals(error)) { reportResult = rr; } } // if no result found, create one if (reportResult == null) { reportResult = new ReportResultJpa(); reportResult.setReport(report); reportResult.setValue(error); reportResult.setProjectName(mapProject.getName()); report.addResult(reportResult); } ReportResultItem existingItem = null; for (final ReportResultItem item : reportResult.getReportResultItems()) { if (item.getItemId().equals(terminologyId)) { existingItem = item; } } // if this item does not yet exist, add it if (existingItem == null) { ReportResultItem item = new ReportResultItemJpa(); item.setReportResult(reportResult); item.setItemId(terminologyId); item.setItemName(conceptName); item.setResultType(ReportResultType.CONCEPT); reportResult.addReportResultItem(item); } } /* see superclass */ @Override public void finishRelease() throws Exception { try { logger = null; if (testModeFlag) { logger = previewFinishLog; } else { logger = finishLog; } clearLog(logger); logger.info("Starting " + (testModeFlag ? 
"Preview Finish Release" : "Finish Release")); logger.info("transactionPerOperation " + mappingService.getTransactionPerOperation()); Logger.getLogger(getClass()).info("transactionPerOperation " + mappingService.getTransactionPerOperation()); // instantiate required services /* * final MappingService mappingService = new MappingServiceJpa(); if * (!testModeFlag) { mappingService.setTransactionPerOperation(false); * mappingService.beginTransaction(); } */ // compare file to current records Report report = compareInputFileToExistingMapRecords(); int pubCt = 0; // get all scope concept terminology ids for this project logger.info(" Get scope concepts for map project"); Set<String> scopeConceptTerminologyIds = new HashSet<>(); for (final SearchResult sr : mappingService .findConceptsInScope(mapProject.getId(), null).getSearchResults()) { scopeConceptTerminologyIds.add(sr.getTerminologyId()); } logger.info(" scope concepts: " + scopeConceptTerminologyIds.size()); if (mapRecords == null || mapRecords.isEmpty()) { MapRecordList mapRecordList = mappingService .getPublishedAndReadyForPublicationMapRecordsForMapProject( mapProject.getId(), null); mapRecords = mapRecordList.getMapRecords(); // Log recently edited records that won't be PUBLISHED for (Long recordId : recentlyEditedRecords) { logger.info( " Recently edited record will not be PUBLISHED " + recordId); } for (final MapRecord record : mapRecords) { // Remove out of scope concepts if not in test mode if (!scopeConceptTerminologyIds.contains(record.getConceptId())) { // remove record if flag set if (!testModeFlag) { logger.info(" REMOVE out of scope record " + record.getId()); mappingService.removeMapRecord(record.getId()); } else { this.addReportError(report, mapProject, record.getConceptId(), record.getConceptName(), "Map record for concept out of scope will be removed"); } } // Record not marked as PUBLISHED because it has been modified again // since // the publication date else if (record .getWorkflowStatus() == 
WorkflowStatus.READY_FOR_PUBLICATION && recentlyEditedRecords.contains(record.getId()) && conceptsFromReleaseFile.contains(record.getConceptId())) { logger.info(" Record not updated to PUBLISHED for " + record.getConceptId() + " " + record.getConceptName()); } // Mark record as PUBLISHED if READY FOR PUBLICATION and in scope else if (record .getWorkflowStatus() == WorkflowStatus.READY_FOR_PUBLICATION && !recentlyEditedRecords.contains(record.getId()) && conceptsFromReleaseFile.contains(record.getConceptId())) { logger.info(" Update record to PUBLISHED for " + record.getConceptId() + " " + record.getConceptName()); pubCt++; // regularly log at intervals if (pubCt % 200 == 0) { logger.info(" published count = " + pubCt); } if (!testModeFlag) { record.setWorkflowStatus(WorkflowStatus.PUBLISHED); mappingService.updateMapRecord(record); } } } // Set latest publication date to now. if (!testModeFlag) { mapProject.setLatestPublicationDate(new Date()); // mapProject.setPublic(true); mappingService.updateMapProject(mapProject); // mappingService.commit(); } } this.addReportError(report, mapProject, "", "Aggregate result (no content)", pubCt + " map records " + (testModeFlag ? "will be " : "") + " marked Published"); // skip if in test mode if (!testModeFlag) { // clear old map refset logger.info(" Clear map refset"); clearMapRefSet(); // Load map refset logger.info(" Load map refset"); loadMapRefSet(); } logger.info(" Committing finish release report"); ReportService reportService = new ReportServiceJpa(); reportService.addReport(report); reportService.close(); mappingService.close(); logger.info("Finished " + (testModeFlag ? "test mode " : "") + "release successfully"); } catch (Exception e) { logger.error(e.getMessage(), e); throw new Exception(e); } } /** * Clear complex map refsets for a map project. 
* * @throws Exception the exception */ private void clearMapRefSet() throws Exception { // begin transaction final ContentService contentService = new ContentServiceJpa(); contentService.setTransactionPerOperation(false); contentService.beginTransaction(); for (final ComplexMapRefSetMember member : contentService .getComplexMapRefSetMembersForRefSetId(mapProject.getRefSetId()) .getIterable()) { logger.debug(" Remove member - " + member); if (!testModeFlag) { if (mapProject.getMapRefsetPattern() != MapRefsetPattern.SimpleMap) { contentService.removeComplexMapRefSetMember(member.getId()); } else { contentService.removeSimpleMapRefSetMember(member.getId()); } } } contentService.commit(); contentService.close(); } /** * Load map refset from file. * * @throws Exception the exception */ @SuppressWarnings("resource") private void loadMapRefSet() throws Exception { String line = ""; int objectCt = 0; // begin transaction final ContentService contentService = new ContentServiceJpa(); contentService.setTransactionPerOperation(false); contentService.beginTransaction(); logger.info(" Open " + inputFile); File f = new File(inputFile); if (!f.exists()) { throw new Exception("Input file does not exist: " + f.toString()); } BufferedReader reader = new BufferedReader(new FileReader(f)); final String terminology = mapProject.getSourceTerminology(); final String version = mapProject.getSourceTerminologyVersion(); while ((line = reader.readLine()) != null) { line = line.replace("\r", ""); final String fields[] = line.split("\t"); // skip header if (!fields[0].equals("id")) { final ComplexMapRefSetMember member = new ComplexMapRefSetMemberJpa(); member.setTerminologyId(fields[0]); member.setEffectiveTime(dateFormat.parse(fields[1])); member.setActive(fields[2].equals("1") ? 
true : false); member.setModuleId(Long.valueOf(fields[3])); member.setRefSetId(fields[4]); // conceptId // Terminology attributes member.setTerminology(terminology); member.setTerminologyVersion(version); // set Concept final Concept concept = contentService.getConcept(fields[5], terminology, version); if (mapProject.getMapRefsetPattern() != MapRefsetPattern.SimpleMap) { // ComplexMap unique attributes member.setMapGroup(Integer.parseInt(fields[6])); member.setMapPriority(Integer.parseInt(fields[7])); member.setMapRule(fields[8]); member.setMapAdvice(fields[9]); member.setMapTarget(fields[10]); if (mapProject.getMapRefsetPattern() == MapRefsetPattern.ComplexMap) { member.setMapRelationId(Long.valueOf(fields[11])); } else if (mapProject .getMapRefsetPattern() == MapRefsetPattern.ExtendedMap) { member.setMapRelationId(Long.valueOf(fields[12])); } else { throw new Exception( "Unsupported map type " + mapProject.getMapRefsetPattern()); } // ComplexMap unique attributes NOT set by file (mapBlock // elements) - set defaults member.setMapBlock(0); member.setMapBlockRule(null); member.setMapBlockAdvice(null); } else { member.setMapGroup(1); member.setMapPriority(1); member.setMapRule(null); member.setMapAdvice(null); member.setMapTarget(fields[6]); member.setMapRelationId(null); } // regularly log and commit at intervals if (++objectCt % 5000 == 0) { logger.info(" count = " + objectCt); contentService.commit(); contentService.clear(); contentService.beginTransaction(); contentService.commit(); contentService.clear(); contentService.beginTransaction(); } if (concept != null) { logger.debug(" Add member - " + member); if (!testModeFlag) { member.setConcept(concept); if (mapProject .getMapRefsetPattern() != MapRefsetPattern.SimpleMap) { contentService.addComplexMapRefSetMember(member); } else { contentService.addSimpleMapRefSetMember( new SimpleMapRefSetMemberJpa(member)); } } } else { throw new Exception( "Member references non-existent concept - " + member); } } } // commit 
any remaining objects contentService.commit(); contentService.close(); reader.close(); } /** * Load map refset from file. * * @return the report * @throws Exception the exception */ @SuppressWarnings("resource") private Report compareInputFileToExistingMapRecords() throws Exception { String line = ""; // begin transaction final ContentService contentService = new ContentServiceJpa(); logger.info(" Open " + inputFile); File f = new File(inputFile); if (!f.exists()) { throw new Exception("Input file does not exist: " + f.toString()); } BufferedReader reader = new BufferedReader(new FileReader(f)); final String terminology = mapProject.getSourceTerminology(); final String version = mapProject.getSourceTerminologyVersion(); Map<String, List<ComplexMapRefSetMember>> conceptRefSetMap = new HashMap<>(); while ((line = reader.readLine()) != null) { line = line.replace("\r", ""); final String fields[] = line.split("\t"); // skip header and inactive refsets if (!fields[0].equals("id") && fields[2].equals("1")) { final ComplexMapRefSetMember member = new ComplexMapRefSetMemberJpa(); member.setTerminologyId(fields[0]); member.setEffectiveTime(dateFormat.parse(fields[1])); member.setActive(fields[2].equals("1") ? 
true : false); member.setModuleId(Long.valueOf(fields[3])); if (moduleId == null) { moduleId = fields[3]; } member.setRefSetId(fields[4]); // conceptId final Concept tempConcept = new ConceptJpa(); tempConcept.setTerminologyId(fields[5]); member.setConcept(tempConcept); // Terminology attributes member.setTerminology(terminology); member.setTerminologyVersion(version); if (mapProject.getMapRefsetPattern() != MapRefsetPattern.SimpleMap) { // ComplexMap unique attributes member.setMapGroup(Integer.parseInt(fields[6])); member.setMapPriority(Integer.parseInt(fields[7])); if (fields[8].equals("OTHERWISE TRUE")) { member.setMapRule("TRUE"); } else { member.setMapRule(fields[8]); } if (fields[9].contains("|")) { member .setMapAdvice(fields[9].substring(fields[9].indexOf("|") + 2)); } else { member.setMapAdvice(fields[9]); } member.setMapTarget(fields[10]); if (mapProject.getMapRefsetPattern() == MapRefsetPattern.ComplexMap) { member.setMapRelationId(Long.valueOf(fields[11])); } else if (mapProject .getMapRefsetPattern() == MapRefsetPattern.ExtendedMap) { member.setMapRelationId(Long.valueOf(fields[12])); } else { throw new Exception( "Unsupported map type " + mapProject.getMapRefsetPattern()); } // ComplexMap unique attributes NOT set by file (mapBlock // elements) - set defaults member.setMapBlock(0); member.setMapBlockRule(null); member.setMapBlockAdvice(null); } else { member.setMapGroup(1); member.setMapPriority(1); member.setMapRule(null); member.setMapAdvice(null); member.setMapTarget(fields[6]); member.setMapRelationId(null); } // Skip concept exclusion rules if (member.getMapRule() != null && member.getMapRule().matches("IFA.*")) { if (member.getMapAdvice() .contains("MAP IS CONTEXT DEPENDENT FOR GENDER") && !member.getMapRule().contains("AND IFA")) { // unless simple gender rule, then keep } else if (member.getMapRule().matches( "IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>].*AND IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>].*") && !member.getMapRule().matches(".*AND IFA.*AND 
IFA.*")) { // unless 2-part age rule, then keep } else if (member.getMapRule() .matches("IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>].*") && !member.getMapRule().contains("AND IFA")) { // unless simple age rule without compound clause, then keep } else { // else skip Logger.getLogger(MappingServiceJpa.class) .debug(" Skipping refset member exclusion rule " + member.getTerminologyId()); continue; } } List<ComplexMapRefSetMember> members = conceptRefSetMap.get(member.getConcept().getTerminologyId()); if (members == null) { members = new ArrayList<>(); } // if we get a TRUE/MAP SOURCE CONCEPT CANNOT BE CLASSIFIED WITH // AVAILABLE DATA member // AND we've already seen a TRUE member // AND it's not in the same map group as an allowed IFA member // skip this member. It is due to up-propagation and shouldn't be // compared. if (member.getMapRule() != null && member.getMapRule().equals("TRUE") && member.getMapAdvice() != null && member.getMapAdvice().equals( "MAP SOURCE CONCEPT CANNOT BE CLASSIFIED WITH AVAILABLE DATA") && containsTrueMember(members) && !containsIFAMember(members, member.getMapGroup())) { continue; } members.add(member); conceptRefSetMap.put(member.getConcept().getTerminologyId(), members); conceptsFromReleaseFile.add(member.getConcept().getTerminologyId()); } } logger.info(conceptRefSetMap.size() + " concept ids with mappings"); // close any remaining objects reader.close(); // construct report WorkflowService workflowService = new WorkflowServiceJpa(); ReportService reportService = new ReportServiceJpa(); ReportDefinition qaDef = null; for (ReportDefinition definition : reportService.getReportDefinitions() .getReportDefinitions()) { if (definition.getName().equals("Release Finalization QA")) { qaDef = definition; break; } } if (qaDef == null) { qaDef = new ReportDefinitionJpa(); qaDef.setDescription( "Compares release input file to current state of mappings and identifies potential mismatches"); qaDef.setDiffReport(false); 
qaDef.setFrequency(ReportFrequency.ON_DEMAND); qaDef.setName("Release Finalization QA"); qaDef.setQACheck(false); qaDef.setQueryType(ReportQueryType.NONE); qaDef.setResultType(ReportResultType.CONCEPT); qaDef.setRoleRequired(MapUserRole.LEAD); reportService.addReportDefinition(qaDef); } Report report = new ReportJpa(); report.setReportDefinition(qaDef); report.setMapProjectId(mapProject.getId()); report.setOwner(mappingService.getMapUser("qa")); report.setAutoGenerated(false); report.setName("Release Finalization QA"); report.setQuery("No query -- constructed by services"); report.setTimestamp(new Date().getTime()); // if test mode flag, add a null entry indicating if (testModeFlag) { this.addReportError(report, mapProject, "", "Note indicator (empty content)", "Finish Release run in TEST mode"); } // get loader user for construction/update of records MapUser loaderUser = mappingService.getMapUser("loader"); // counter for number of records matching between current and release int matchCt = 0; logger.info("Checking for discrepancies..."); for (String conceptId : conceptRefSetMap.keySet()) { final List<ComplexMapRefSetMember> members = conceptRefSetMap.get(conceptId); Concept concept = contentService.getConcept(conceptId, mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()); boolean discrepancyFound = false; Date nowDate = new Date(); MapRecord releaseRecord = new MapRecordJpa(); releaseRecord.setMapProjectId(mapProject.getId()); releaseRecord.setConceptId(conceptId); releaseRecord.setConceptName(concept.getDefaultPreferredName()); releaseRecord.setLastModified(nowDate.getTime()); releaseRecord.setLastModifiedBy(loaderUser); releaseRecord.setTimestamp(nowDate.getTime()); releaseRecord.setOwner(loaderUser); releaseRecord.setWorkflowStatus(WorkflowStatus.PUBLISHED); // get the map record for this concept id MapRecord mapRecord = null; try { mapRecord = getMapRecordForTerminologyId(conceptId); } catch (Exception e) { // do nothing, getMapRecord 
throws exception intended // to stop release where true errors exist } if (mapRecord == null) { logger.info( "Discrepancy: no current map record for concept id " + conceptId); discrepancyFound = true; } // skip records still in the workflow else if (!mapRecord.getWorkflowStatus().equals(WorkflowStatus.PUBLISHED) && !mapRecord.getWorkflowStatus() .equals(WorkflowStatus.READY_FOR_PUBLICATION)) { System.out.println( "Skipping workflow status: " + mapRecord.getWorkflowStatus()); continue; } // if entries are mismatched in size, automatic flag else if (mapRecord.getMapEntries().size() != members.size()) { logger.info("Discrepancy: entry set size mismatch for " + conceptId); discrepancyFound = true; } // otherwise, check for discrepancies in entries else { for (MapEntry recordEntry : mapRecord.getMapEntries()) { boolean entryMatched = false; for (ComplexMapRefSetMember member : members) { if (getHash(recordEntry).equals(getHash(member))) { entryMatched = true; break; } } if (!entryMatched) { logger.info( "Discrepancy: current mapping has no corresponding release mapping " + conceptId); discrepancyFound = true; break; } } // check release mappings against current mappings for (ComplexMapRefSetMember member : members) { final String memberHash = getHash(member); MapEntry releaseEntry = this.getMapEntryForComplexMapRefSetMember(member); releaseEntry.setMapRecord(mapRecord); releaseRecord.addMapEntry(releaseEntry); boolean entryMatched = false; for (MapEntry recordEntry : mapRecord.getMapEntries()) { if (getHash(recordEntry).equals(memberHash)) { entryMatched = true; if (entryMatched && !releaseEntry.isEquivalent(recordEntry)) { logger.info( "Discrepancy: release mapping has non-equivalent corresponding current mapping " + conceptId); discrepancyFound = true; break; } } } if (!entryMatched) { logger.info("Discrepancy: no current map record for concept id " + conceptId); discrepancyFound = true; } } } // if discrepancy found, add or update the map record if (discrepancyFound) 
{ logger.info("Discrepancy found for " + conceptId + "|" + concept.getDefaultPreferredName() + "|"); if (mapRecord != null) { if (new Date(mapRecord.getLastModified()) .after(mapProject.getEditingCycleBeginDate())) { logger.info("Recently edited discrepancy found for " + conceptId + "|" + concept.getDefaultPreferredName() + "|"); recentlyEditedRecords.add(mapRecord.getId()); this.addReportError(report, mapProject, conceptId, concept.getDefaultPreferredName(), "Map record discrepancy with recent edits-- will not be updated to release version"); } else { if (!testModeFlag) { // remove and re-add map record to clear previous entries mappingService.removeMapRecord(mapRecord.getId()); mappingService.addMapRecord(releaseRecord); } this.addReportError(report, mapProject, conceptId, concept.getDefaultPreferredName(), "Map record discrepancy -- " + (testModeFlag ? "will be " : "") + "updated to release version"); } } else { this.addReportError(report, mapProject, conceptId, concept.getDefaultPreferredName(), "Map record found in release but no current record found -- no action " + (testModeFlag ? "will be " : "") + "taken"); } } else { matchCt++; } } this.addReportError(report, mapProject, "", "Aggregate result (no content)", matchCt + " records matched between release and current records"); reportService.close(); workflowService.close(); contentService.close(); return report; } private boolean containsTrueMember(List<ComplexMapRefSetMember> members) { for (ComplexMapRefSetMember m : members) { if (m.getMapRule().equals("TRUE")) { return true; } } return false; } private boolean containsIFAMember(List<ComplexMapRefSetMember> members, int mapGroup) { for (ComplexMapRefSetMember m : members) { if (m.getMapRule().matches("IFA.*") && m.getMapGroup() == mapGroup) { return true; } } return false; } /** * Update statistic max. 
* * @param statistic the statistic * @param value the value */ private void updateStatMax(String statistic, int value) { Integer stat = reportStatistics.get(statistic); if (stat == null) { reportStatistics.put(statistic, value); } else { reportStatistics.put(statistic, Math.max(stat, value)); } } /* see superclass */ @Override public void setEffectiveTime(String effectiveTime) { this.effectiveTime = effectiveTime; } /* see superclass */ @Override public void setModuleId(String moduleId) { this.moduleId = moduleId; } /* see superclass */ @Override public void setOutputDir(String outputDir) { this.outputDir = outputDir; } /* see superclass */ @Override public void setWriteSnapshot(boolean writeSnapshot) { this.writeSnapshot = writeSnapshot; } /* see superclass */ @Override public void setWriteActiveSnapshot(boolean writeActiveSnapshot) { this.writeActiveSnapshot = writeActiveSnapshot; } /* see superclass */ @Override public void setWriteDelta(boolean writeDelta) { this.writeDelta = writeDelta; } /* see superclass */ @Override public void setMapProject(MapProject mapProject) throws InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, ClassNotFoundException { this.mapProject = mapProject; // instantiate the algorithm handler algorithmHandler = mappingService.getProjectSpecificAlgorithmHandler(mapProject); initializeLogs(); } /* see superclass */ @Override public void setMapRecords(List<MapRecord> mapRecords) { this.mapRecords = mapRecords; } private void initializeLogs() { try { String rootPath = ConfigUtility.getConfigProperties() .getProperty("map.principle.source.document.dir"); if (!rootPath.endsWith("/") && !rootPath.endsWith("\\")) { rootPath += "/"; } rootPath += mapProject.getId() + "/logs"; File logDirectory = new File(rootPath); if (!logDirectory.exists()) { logDirectory.mkdir(); } File beginLogFile = new File(logDirectory, "begin.log"); LoggerUtility.setConfiguration("beginRelease", 
beginLogFile.getAbsolutePath()); beginLog = LoggerUtility.getLogger("beginRelease"); File processLogFile = new File(logDirectory, "process.log"); LoggerUtility.setConfiguration("processRelease", processLogFile.getAbsolutePath()); processLog = LoggerUtility.getLogger("processRelease"); File previewFinishLogFile = new File(logDirectory, "previewFinish.log"); LoggerUtility.setConfiguration("previewFinishRelease", previewFinishLogFile.getAbsolutePath()); previewFinishLog = LoggerUtility.getLogger("previewFinishRelease"); File finishLogFile = new File(logDirectory, "finish.log"); LoggerUtility.setConfiguration("finishRelease", finishLogFile.getAbsolutePath()); finishLog = LoggerUtility.getLogger("finishRelease"); } catch (Exception e) { logger.error(e.getStackTrace()); } } private void clearLog(Logger logger) { try { String rootPath = ConfigUtility.getConfigProperties() .getProperty("map.principle.source.document.dir"); if (!rootPath.endsWith("/") && !rootPath.endsWith("\\")) { rootPath += "/"; } rootPath += mapProject.getId() + "/logs"; File logDirectory = new File(rootPath); if (!logDirectory.exists()) { return; } if (logger == beginLog) { File beginLogFile = new File(logDirectory, "begin.log"); PrintWriter writer = new PrintWriter(beginLogFile); writer.print(""); writer.close(); } if (logger == processLog) { File processLogFile = new File(logDirectory, "process.log"); PrintWriter writer = new PrintWriter(processLogFile); writer.print(""); writer.close(); } File previewFinishLogFile = new File(logDirectory, "previewFinish.log"); LoggerUtility.setConfiguration("previewFinishRelease", previewFinishLogFile.getAbsolutePath()); previewFinishLog = LoggerUtility.getLogger("previewFinishRelease"); File finishLogFile = new File(logDirectory, "finish.log"); LoggerUtility.setConfiguration("finishRelease", finishLogFile.getAbsolutePath()); finishLog = LoggerUtility.getLogger("finishRelease"); } catch (Exception e) { logger.info(e.getStackTrace()); } } /* see superclass */ 
/* see superclass */
@Override
public void setInputFile(String inputFile) {
  this.inputFile = inputFile;
}

/**
 * Returns the refset filename pattern fragment for the project's map type.
 *
 * @param mapProject the map project
 * @return the pattern for type, or null for an unrecognized pattern
 */
@Override
public String getPatternForType(MapProject mapProject) {
  switch (mapProject.getMapRefsetPattern()) {
    case SimpleMap:
      return "sRefset_";
    case ComplexMap:
      return "iissscRefset_";
    case ExtendedMap:
      return "iisssccRefset_";
    default:
      return null;
  }
}

/**
 * Returns the RF2 tab-separated column header line for the project's map
 * type.
 *
 * @param mapProject the map project
 * @return the header, or null for an unrecognized pattern
 */
@SuppressWarnings("static-method")
private String getHeader(MapProject mapProject) {
  switch (mapProject.getMapRefsetPattern()) {
    case SimpleMap:
      return "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\tmapTarget";
    case ComplexMap:
      return "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\t"
          + "mapGroup\tmapPriority\tmapRule\tmapAdvice\tmapTarget\tcorrelationId";
    case ExtendedMap:
      return "id\teffectiveTime\tactive\tmoduleId\trefsetId\treferencedComponentId\t"
          + "mapGroup\tmapPriority\tmapRule\tmapAdvice\tmapTarget\tcorrelationId\tmapCategoryId";
    default:
      return null;
  }
}
}
package org.ihtsdo.otf.mapping.jpa.services; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.persistence.NoResultException; import javax.xml.bind.annotation.XmlTransient; import org.apache.log4j.Logger; import org.apache.lucene.queryParser.MultiFieldQueryParser; import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.util.Version; import org.hibernate.criterion.MatchMode; import org.hibernate.envers.AuditReader; import org.hibernate.envers.AuditReaderFactory; import org.hibernate.envers.query.AuditEntity; import org.hibernate.envers.query.AuditQuery; import org.hibernate.search.SearchFactory; import org.hibernate.search.jpa.FullTextEntityManager; import org.hibernate.search.jpa.Search; import org.ihtsdo.otf.mapping.helpers.MapAdviceList; import org.ihtsdo.otf.mapping.helpers.MapAdviceListJpa; import org.ihtsdo.otf.mapping.helpers.MapAgeRangeList; import org.ihtsdo.otf.mapping.helpers.MapAgeRangeListJpa; import org.ihtsdo.otf.mapping.helpers.MapPrincipleList; import org.ihtsdo.otf.mapping.helpers.MapPrincipleListJpa; import org.ihtsdo.otf.mapping.helpers.MapProjectList; import org.ihtsdo.otf.mapping.helpers.MapProjectListJpa; import org.ihtsdo.otf.mapping.helpers.MapRecordList; import org.ihtsdo.otf.mapping.helpers.MapRecordListJpa; import org.ihtsdo.otf.mapping.helpers.MapRelationList; import org.ihtsdo.otf.mapping.helpers.MapRelationListJpa; import org.ihtsdo.otf.mapping.helpers.MapUserList; import org.ihtsdo.otf.mapping.helpers.MapUserListJpa; import org.ihtsdo.otf.mapping.helpers.MapUserPreferencesList; import org.ihtsdo.otf.mapping.helpers.MapUserPreferencesListJpa; import org.ihtsdo.otf.mapping.helpers.MapUserRole; import 
org.ihtsdo.otf.mapping.helpers.PfsParameter; import org.ihtsdo.otf.mapping.helpers.PfsParameterJpa; import org.ihtsdo.otf.mapping.helpers.ProjectSpecificAlgorithmHandler; import org.ihtsdo.otf.mapping.helpers.SearchResult; import org.ihtsdo.otf.mapping.helpers.SearchResultJpa; import org.ihtsdo.otf.mapping.helpers.SearchResultList; import org.ihtsdo.otf.mapping.helpers.SearchResultListJpa; import org.ihtsdo.otf.mapping.helpers.TreePositionList; import org.ihtsdo.otf.mapping.helpers.TreePositionListJpa; import org.ihtsdo.otf.mapping.helpers.UserErrorList; import org.ihtsdo.otf.mapping.helpers.UserErrorListJpa; import org.ihtsdo.otf.mapping.helpers.WorkflowAction; import org.ihtsdo.otf.mapping.helpers.WorkflowPath; import org.ihtsdo.otf.mapping.helpers.WorkflowStatus; import org.ihtsdo.otf.mapping.jpa.MapAdviceJpa; import org.ihtsdo.otf.mapping.jpa.MapAgeRangeJpa; import org.ihtsdo.otf.mapping.jpa.MapEntryJpa; import org.ihtsdo.otf.mapping.jpa.MapPrincipleJpa; import org.ihtsdo.otf.mapping.jpa.MapProjectJpa; import org.ihtsdo.otf.mapping.jpa.MapRecordJpa; import org.ihtsdo.otf.mapping.jpa.MapRelationJpa; import org.ihtsdo.otf.mapping.jpa.MapUserJpa; import org.ihtsdo.otf.mapping.jpa.MapUserPreferencesJpa; import org.ihtsdo.otf.mapping.model.MapAdvice; import org.ihtsdo.otf.mapping.model.MapAgeRange; import org.ihtsdo.otf.mapping.model.MapEntry; import org.ihtsdo.otf.mapping.model.MapNote; import org.ihtsdo.otf.mapping.model.MapPrinciple; import org.ihtsdo.otf.mapping.model.MapProject; import org.ihtsdo.otf.mapping.model.MapRecord; import org.ihtsdo.otf.mapping.model.MapRelation; import org.ihtsdo.otf.mapping.model.MapUser; import org.ihtsdo.otf.mapping.model.MapUserPreferences; import org.ihtsdo.otf.mapping.model.UserError; import org.ihtsdo.otf.mapping.rf2.ComplexMapRefSetMember; import org.ihtsdo.otf.mapping.rf2.Concept; import org.ihtsdo.otf.mapping.rf2.TreePosition; import org.ihtsdo.otf.mapping.services.ContentService; import 
org.ihtsdo.otf.mapping.services.MappingService; import org.ihtsdo.otf.mapping.services.MetadataService; import org.ihtsdo.otf.mapping.services.WorkflowService; import org.ihtsdo.otf.mapping.workflow.TrackingRecord; /** * JPA implementation of the {@link MappingService}. */ public class MappingServiceJpa extends RootServiceJpa implements MappingService { /** * Instantiates an empty {@link MappingServiceJpa}. * * @throws Exception */ public MappingServiceJpa() throws Exception { super(); } /** * Close the manager when done with this service. * * @throws Exception * the exception */ @Override public void close() throws Exception { if (manager.isOpen()) { manager.close(); } } // MapProject // - getMapProjects // - getMapProject(Long id) // - getMapProject(String name) // - findMapProjects(String query) // - addMapProject(MapProject mapProject) // - updateMapProject(MapProject mapProject) // - removeMapProject(MapProject mapProject) /** * Return map project for auto-generated id. * * @param id * the auto-generated id * @return the MapProject */ @Override public MapProject getMapProject(Long id) throws Exception { MapProject m = null; javax.persistence.Query query = manager .createQuery("select m from MapProjectJpa m where id = :id"); query.setParameter("id", id); m = (MapProject) query.getSingleResult(); m.getScopeConcepts().size(); m.getScopeExcludedConcepts().size(); m.getMapAdvices().size(); m.getMapRelations().size(); m.getMapLeads().size(); m.getMapSpecialists().size(); m.getMapPrinciples().size(); m.getPresetAgeRanges().size(); return m; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#getMapProjectForRefSetId * (java.lang.String) */ @Override public MapProject getMapProjectForRefSetId(String refSetId) throws Exception { MapProject m = null; javax.persistence.Query query = manager.createQuery( "select m from MapProjectJpa m where refSetId = :refSetId") .setParameter("refSetId", refSetId); m = (MapProject) query.getSingleResult(); 
// (continuation of handleMapProjectLazyInitialization)
// Touch each lazy collection so it is populated before the entity is
// detached from the persistence context and returned to callers.
m.getScopeConcepts().size();
m.getScopeExcludedConcepts().size();
m.getMapAdvices().size();
m.getMapRelations().size();
m.getMapLeads().size();
m.getMapSpecialists().size();
m.getMapPrinciples().size();
m.getPresetAgeRanges().size();
return m;
}

/**
 * Retrieves all map projects, with lazy collections initialized.
 *
 * @return a MapProjectList wrapping every MapProject
 */
@Override
@SuppressWarnings("unchecked")
public MapProjectList getMapProjects() {

  List<MapProject> mapProjects = null;

  // construct query
  javax.persistence.Query query = manager
      .createQuery("select m from MapProjectJpa m");

  mapProjects = query.getResultList();

  // force instantiation of lazy collections
  for (MapProject mapProject : mapProjects) {
    handleMapProjectLazyInitialization(mapProject);
  }

  MapProjectListJpa mapProjectList = new MapProjectListJpa();
  mapProjectList.setMapProjects(mapProjects);
  mapProjectList.setTotalCount(mapProjects.size());
  return mapProjectList;
}

/**
 * Full-text (Lucene) query for map projects.
 *
 * @param query the query; "field:value" syntax targets a single field,
 *          otherwise all indexed fields are searched
 * @param pfsParameter the pfs parameter (currently unused here —
 *          NOTE(review): paging/filtering is not applied; confirm intent)
 * @return the matching projects as search results, sorted by id
 * @throws Exception if the Lucene query cannot be parsed
 */
@Override
@SuppressWarnings("unchecked")
public SearchResultList findMapProjectsForQuery(String query,
    PfsParameter pfsParameter) throws Exception {

  SearchResultList s = new SearchResultListJpa();

  FullTextEntityManager fullTextEntityManager = Search
      .getFullTextEntityManager(manager);

  SearchFactory searchFactory = fullTextEntityManager.getSearchFactory();
  Query luceneQuery;

  // construct luceneQuery based on URL format
  if (query.indexOf(':') == -1) { // no fields indicated
    MultiFieldQueryParser queryParser = new MultiFieldQueryParser(
        Version.LUCENE_36, fieldNames.toArray(new String[0]),
        searchFactory.getAnalyzer(MapProjectJpa.class));
    queryParser.setAllowLeadingWildcard(false);
    luceneQuery = queryParser.parse(query);
  } else { // field:value
    QueryParser queryParser = new QueryParser(Version.LUCENE_36, "summary",
        searchFactory.getAnalyzer(MapProjectJpa.class));
    luceneQuery = queryParser.parse(query);
  }

  List<MapProject> m = fullTextEntityManager.createFullTextQuery(
      luceneQuery, MapProjectJpa.class).getResultList();

  Logger.getLogger(this.getClass()).debug(
      Integer.toString(m.size()) + " map projects retrieved");

  for (MapProject mp : m) {
    s.addSearchResult(new SearchResultJpa(mp.getId(), mp.getRefSetId()
        .toString(), mp.getName()));
  }

  // Sort by ID
  s.sortBy(new Comparator<SearchResult>() {
    @Override
    public int compare(SearchResult o1, SearchResult o2) {
      return o1.getId().compareTo(o2.getId());
    }
  });

  fullTextEntityManager.close();

  // closing fullTextEntityManager also closes manager, recreate
  manager = factory.createEntityManager();

  return s;
}

/**
 * Adds a map project.
 *
 * @param mapProject the map project to persist
 * @return the persisted map project
 * @throws Exception if a user holds more than one role on the project
 */
@Override
public MapProject addMapProject(MapProject mapProject) throws Exception {

  // check that each user has only one role
  validateUserAndRole(mapProject);

  if (getTransactionPerOperation()) {
    tx = manager.getTransaction();
    tx.begin();
    manager.persist(mapProject);
    tx.commit();
    return mapProject;
  } else {
    // caller-managed transaction mode: refuse to write outside a tx
    if (!tx.isActive()) {
      throw new IllegalStateException(
          "Error attempting to change data without an active transaction");
    }
    manager.persist(mapProject);
    return mapProject;
  }
}

/**
 * Updates a map project.
 *
 * @param mapProject the changed map project
 * @throws Exception if a user holds more than one role on the project
 */
@Override
public void updateMapProject(MapProject mapProject) throws Exception {

  // check that each user has only one role
  validateUserAndRole(mapProject);

  if (getTransactionPerOperation()) {
    tx = manager.getTransaction();
    tx.begin();
    manager.merge(mapProject);
    tx.commit();
  } else {
    manager.merge(mapProject);
  }
}

/**
 * Removes (deletes) a map project.
* * @param mapProjectId * the map project to be removed */ @Override public void removeMapProject(Long mapProjectId) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); // first, remove the leads and specialists from this project tx.begin(); MapProject mp = manager.find(MapProjectJpa.class, mapProjectId); mp.setMapLeads(null); mp.setMapSpecialists(null); tx.commit(); // now remove the entry tx.begin(); if (manager.contains(mp)) { manager.remove(mp); } else { manager.remove(manager.merge(mp)); } tx.commit(); } else { MapProject mp = manager.find(MapProjectJpa.class, mapProjectId); mp.setMapLeads(null); mp.setMapSpecialists(null); if (manager.contains(mp)) { manager.remove(mp); } else { manager.remove(manager.merge(mp)); } } } // MapUser // - getMapUsers() // - getMapProjectsForUser(MapUser mapUser) // - findMapUsers(String query) // - addMapUser(MapUser mapUser) // - updateMapUser(MapUser mapUser) // - removeMapUser(Long id) /** * Retrieve all map users. * * @return a List of MapUsers */ @Override @SuppressWarnings("unchecked") public MapUserList getMapUsers() { List<MapUser> m = null; javax.persistence.Query query = manager .createQuery("select m from MapUserJpa m"); m = query.getResultList(); MapUserListJpa mapUserList = new MapUserListJpa(); mapUserList.setMapUsers(m); mapUserList.setTotalCount(m.size()); return mapUserList; } /** * Return map specialist for auto-generated id. 
* * @param id * the auto-generated id * @return the MapSpecialist */ @Override public MapUser getMapUser(Long id) throws Exception { javax.persistence.Query query = manager .createQuery("select m from MapUserJpa m where id = :id"); query.setParameter("id", id); return (MapUser) query.getSingleResult(); } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#getMapUser(java.lang.String * ) */ @Override public MapUser getMapUser(String userName) throws Exception { javax.persistence.Query query = manager .createQuery("select m from MapUserJpa m where userName = :userName"); query.setParameter("userName", userName); return (MapUser) query.getSingleResult(); } /** * Retrieve all map projects assigned to a particular map specialist. * * @param mapUser * the map user * @return a List of MapProjects */ @Override public MapProjectList getMapProjectsForMapUser(MapUser mapUser) { MapProjectList mpList = getMapProjects(); List<MapProject> mpListReturn = new ArrayList<>(); // iterate and check for presence of mapUser as specialist for (MapProject mp : mpList.getMapProjects()) { for (MapUser ms : mp.getMapSpecialists()) { if (ms.equals(mapUser)) { mpListReturn.add(mp); } } for (MapUser ms : mp.getMapLeads()) { if (ms.equals(mapUser)) { mpListReturn.add(mp); } } } // force instantiation of lazy collections for (MapProject mapProject : mpListReturn) { handleMapProjectLazyInitialization(mapProject); } MapProjectListJpa mapProjectList = new MapProjectListJpa(); mapProjectList.setMapProjects(mpListReturn); mapProjectList.setTotalCount(mpListReturn.size()); return mapProjectList; } /** * Update a map specialist. * * @param mapUser * the changed map user */ @Override public void updateMapUser(MapUser mapUser) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.merge(mapUser); tx.commit(); } else { manager.merge(mapUser); } } /** * Remove (delete) a map specialist. 
* * @param mapUserId * the map user to be removed */ @Override public void removeMapUser(Long mapUserId) { tx = manager.getTransaction(); // retrieve this map specialist MapUser mu = manager.find(MapUserJpa.class, mapUserId); // retrieve all projects on which this specialist appears List<MapProject> projects = getMapProjectsForMapUser(mu) .getMapProjects(); if (getTransactionPerOperation()) { // remove specialist from all these projects tx.begin(); for (MapProject mp : projects) { mp.removeMapLead(mu); mp.removeMapSpecialist(mu); manager.merge(mp); } tx.commit(); // remove specialist tx.begin(); if (manager.contains(mu)) { manager.remove(mu); } else { manager.remove(manager.merge(mu)); } tx.commit(); } else { for (MapProject mp : projects) { mp.removeMapLead(mu); mp.removeMapSpecialist(mu); manager.merge(mp); } if (manager.contains(mu)) { manager.remove(mu); } else { manager.remove(manager.merge(mu)); } } } /** * Add a map lead. * * @param mapUser * the map lead * @return the map user */ @Override public MapUser addMapUser(MapUser mapUser) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(mapUser); tx.commit(); } else { manager.persist(mapUser); } return mapUser; } // MapRecord /** * Retrieve all map records. * * @return a List of MapRecords */ @Override @SuppressWarnings("unchecked") public MapRecordList getMapRecords() { List<MapRecord> mapRecords = null; // construct query javax.persistence.Query query = manager .createQuery("select m from MapRecordJpa m"); // Try query mapRecords = query.getResultList(); MapRecordListJpa mapRecordList = new MapRecordListJpa(); mapRecordList.setMapRecords(mapRecords); mapRecordList.setTotalCount(mapRecords.size()); return mapRecordList; } /** * Retrieve map record for given id. 
 * @param id the map record id
 * @return the map record, or null if none exists
 */
@Override
public MapRecord getMapRecord(Long id) throws Exception {

  /*
   * Try to retrieve the single expected result If zero or more than one
   * result are returned, log error and set result to null
   */
  MapRecord mapRecord = manager.find(MapRecordJpa.class, id);

  // initialize lazy collections before the record leaves the service
  if (mapRecord != null)
    handleMapRecordLazyInitialization(mapRecord);

  Logger.getLogger(this.getClass()).debug(
      "Returning record_id... "
          + ((mapRecord != null) ? mapRecord.getId().toString() : "null"));

  return mapRecord;
}

/**
 * Full-text (Lucene) query for map records.
 *
 * @param query the lucene query string; "field:value" syntax targets a
 *          single field, otherwise all indexed fields are searched
 * @param pfsParameter the pfs parameter (NOTE(review): not applied here —
 *          confirm whether paging was intended)
 * @return the matching records as search results, sorted by id
 * @throws Exception if the Lucene query cannot be parsed
 */
@Override
@SuppressWarnings("unchecked")
public SearchResultList findMapRecordsForQuery(String query,
    PfsParameter pfsParameter) throws Exception {

  SearchResultList s = new SearchResultListJpa();

  FullTextEntityManager fullTextEntityManager = Search
      .getFullTextEntityManager(manager);

  SearchFactory searchFactory = fullTextEntityManager.getSearchFactory();
  Query luceneQuery;

  // construct luceneQuery based on URL format
  if (query.indexOf(':') == -1) { // no fields indicated
    MultiFieldQueryParser queryParser = new MultiFieldQueryParser(
        Version.LUCENE_36, fieldNames.toArray(new String[0]),
        searchFactory.getAnalyzer(MapRecordJpa.class));
    queryParser.setAllowLeadingWildcard(false);
    luceneQuery = queryParser.parse(query);
  } else { // field:value
    QueryParser queryParser = new QueryParser(Version.LUCENE_36, "summary",
        searchFactory.getAnalyzer(MapRecordJpa.class));
    luceneQuery = queryParser.parse(query);
  }

  List<MapRecord> mapRecords = fullTextEntityManager.createFullTextQuery(
      luceneQuery, MapRecordJpa.class).getResultList();

  Logger.getLogger(this.getClass()).debug(
      Integer.toString(mapRecords.size()) + " map records retrieved");

  for (MapRecord mapRecord : mapRecords) {
    s.addSearchResult(new SearchResultJpa(mapRecord.getId(), mapRecord
        .getConceptId().toString(), mapRecord.getConceptName()));
  }

  // Sort by ID
  s.sortBy(new Comparator<SearchResult>() {
    @Override
    public int compare(SearchResult o1, SearchResult o2) {
      return o1.getId().compareTo(o2.getId());
    }
  });

  fullTextEntityManager.close();

  // closing fullTextEntityManager also closes manager, recreate
  manager = factory.createEntityManager();

  return s;

  /*
   * for (MapRecord mr : m) { if (pfsParameter == null ||
   * pfsParameter.isIndexInRange(i++)) { s.addSearchResult(new
   * SearchResultJpa(mr.getId(), "", mr.getConceptId())); } }
   */
}

/**
 * Adds a map record.
 *
 * @param mapRecord the map record to be added
 * @return the map record
 * @throws Exception if the record has no owner or no lastModifiedBy user
 */
@Override
public MapRecord addMapRecord(MapRecord mapRecord) throws Exception {

  // check if user valid
  if (mapRecord.getOwner() == null) {
    throw new Exception("Map Record requires valid user in owner field");
  }

  if (mapRecord.getLastModifiedBy() == null) {
    throw new Exception(
        "Map Record requires valid user in lastModifiedBy field");
  }

  // set the map record of all elements of this record
  mapRecord.assignToChildren();

  if (getTransactionPerOperation()) {
    tx = manager.getTransaction();
    tx.begin();
    manager.persist(mapRecord);
    tx.commit();
    return mapRecord;
  } else {
    manager.persist(mapRecord);
    return mapRecord;
  }
}

/**
 * Updates a map record; stamps the last-modified time.
 *
 * @param mapRecord the map record to be updated
 */
@Override
public void updateMapRecord(MapRecord mapRecord) {

  // update last modified timestamp
  mapRecord.setLastModified((new java.util.Date()).getTime());

  // first assign the map record to its children
  mapRecord.assignToChildren();

  if (getTransactionPerOperation()) {
    tx = manager.getTransaction();
    tx.begin();
    manager.merge(mapRecord);
    tx.commit();
  } else {
    manager.merge(mapRecord);
  }
}

/**
 * Removes (deletes) a map record by id.
* * @param id * the id of the map record to be removed */ @Override public void removeMapRecord(Long id) { tx = manager.getTransaction(); // find the map record MapRecord m = manager.find(MapRecordJpa.class, id); if (getTransactionPerOperation()) { // delete the map record tx.begin(); if (manager.contains(m)) { manager.remove(m); } else { manager.remove(manager.merge(m)); } tx.commit(); } else { if (manager.contains(m)) { manager.remove(m); } else { manager.remove(manager.merge(m)); } } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#getMapRecordRevisions( * java.lang.Long) */ @SuppressWarnings("unchecked") @Override public MapRecordList getMapRecordRevisions(Long mapRecordId) { AuditReader reader = AuditReaderFactory.get(manager); List<MapRecord> revisions = reader.createQuery() // all revisions, returned as objects, not finding deleted entries .forRevisionsOfEntity(MapRecordJpa.class, true, false) // search by id .add(AuditEntity.id().eq(mapRecordId)) // order by descending timestamp .addOrder(AuditEntity.property("timestamp").desc()) // execute query .getResultList(); // construct the map MapRecordListJpa mapRecordList = new MapRecordListJpa(); mapRecordList.setMapRecords(revisions); for (MapRecord mapRecord : revisions) { handleMapRecordLazyInitialization(mapRecord); } mapRecordList.setTotalCount(revisions.size()); return mapRecordList; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#getRecentlyEditedMapRecords * (org.ihtsdo.otf.mapping.model.MapUser) */ @SuppressWarnings({ "unchecked" }) @Override public MapRecordList getRecentlyEditedMapRecords(Long projectId, String userName, PfsParameter pfsParameter) throws Exception { MapUser user = getMapUser(userName); AuditReader reader = AuditReaderFactory.get(manager); PfsParameter localPfsParameter = pfsParameter; // if no pfsParameter supplied, construct a default one if (localPfsParameter == null) localPfsParameter = new PfsParameterJpa(); // split the 
query restrictions if (localPfsParameter.getQueryRestriction() != null) { // do nothing } // construct the query AuditQuery query = reader.createQuery() // all revisions, returned as objects, finding deleted entries .forRevisionsOfEntity(MapRecordJpa.class, true, true) // add mapProjectId and owner as constraints .add(AuditEntity.property("mapProjectId").eq(projectId)) .add(AuditEntity.relatedId("lastModifiedBy").eq(user.getId())) // exclude records with workflow status NEW .add(AuditEntity.property("workflowStatus").ne( WorkflowStatus.NEW)); // if sort field specified if (localPfsParameter.getSortField() != null) { query.addOrder(AuditEntity.property( localPfsParameter.getSortField()).desc()); // otherwise, sort by last modified (descending) } else { query.addOrder(AuditEntity.property("lastModified").desc()); } // if paging request supplied, set first result and max results if (localPfsParameter.getStartIndex() != -1 && localPfsParameter.getMaxResults() != -1) { query.setFirstResult(localPfsParameter.getStartIndex()) .setMaxResults(localPfsParameter.getMaxResults()); } // if query terms specified, add if (pfsParameter.getQueryRestriction() != null) { String[] queryTerms = pfsParameter.getQueryRestriction().split(" "); query.add(AuditEntity.or( AuditEntity.property("conceptId").in(queryTerms), AuditEntity.property("conceptName").like( pfsParameter.getQueryRestriction(), MatchMode.ANYWHERE))); } // execute the query List<MapRecord> editedRecords = query.getResultList(); // create the mapRecordList and set total size MapRecordListJpa mapRecordList = new MapRecordListJpa(); // mapRecordList.setTotalCount(editedRecords.size()); // only add one copy // TODO Decide whether or not to requery to get a full page of 10 List<MapRecord> uniqueRecords = new ArrayList<>(); for (MapRecord mapRecord : editedRecords) { boolean recordExists = false; for (MapRecord mr : uniqueRecords) { if (mr.getId().equals(mapRecord.getId())) recordExists = true; } if (recordExists == false) 
uniqueRecords.add(mapRecord);
}

// handle all lazy initializations
for (MapRecord mapRecord : uniqueRecords) {
  handleMapRecordLazyInitialization(mapRecord);
}

mapRecordList.setMapRecords(uniqueRecords);
return mapRecordList;
}

// Other query services

// Descendant services

/**
 * Retrieves map records for a given terminology id.
 *
 * @param terminologyId the concept id
 * @return the list of map records for that concept
 */
@SuppressWarnings("unchecked")
@Override
public MapRecordList getMapRecordsForConcept(String terminologyId) {
  List<MapRecord> mapRecords = null;

  // construct query
  javax.persistence.Query query = manager
      .createQuery("select m from MapRecordJpa m where conceptId = :conceptId");

  // Try query
  query.setParameter("conceptId", terminologyId);
  mapRecords = query.getResultList();

  // initialize lazy collections before the records leave the service
  for (MapRecord mapRecord : mapRecords) {
    handleMapRecordLazyInitialization(mapRecord);
  }

  MapRecordListJpa mapRecordList = new MapRecordListJpa();
  mapRecordList.setMapRecords(mapRecords);
  mapRecordList.setTotalCount(mapRecords.size());
  return mapRecordList;
}

/*
 * (non-Javadoc)
 *
 * @see
 * org.ihtsdo.otf.mapping.services.MappingService#getMapRecordsForMapProject
 * (java.lang.Long)
 *
 * NOTE(review): unlike the sibling getters, this does not set totalCount
 * and does not initialize lazy collections — confirm callers expect that.
 */
@SuppressWarnings("unchecked")
@Override
public MapRecordList getMapRecordsForMapProject(Long mapProjectId)
    throws Exception {

  javax.persistence.Query query = manager
      .createQuery(
          "select m from MapRecordJpa m where mapProjectId = :mapProjectId")
      .setParameter("mapProjectId", mapProjectId);

  MapRecordList mapRecordList = new MapRecordListJpa();
  mapRecordList.setMapRecords(query.getResultList());
  return mapRecordList;
}

/*
 * (non-Javadoc)
 *
 * @see
 * org.ihtsdo.otf.mapping.services.MappingService#getMapRecordsForMapProject
 * (java.lang.Long, org.ihtsdo.otf.mapping.helpers.PfsParameter)
 */
@Override
@SuppressWarnings("unchecked")
public MapRecordList getPublishedAndReadyForPublicationMapRecordsForMapProject(
    Long mapProjectId, PfsParameter pfsParameter) throws Exception {

  // construct basic query
  String full_query = constructMapRecordForMapProjectIdQuery(mapProjectId,
      pfsParameter == null ? new PfsParameterJpa() : pfsParameter);

  // restrict to records in a publishable workflow state
  full_query += " AND (workflowStatus:'PUBLISHED' OR workflowStatus:'READY_FOR_PUBLICATION')";

  Logger.getLogger(MappingServiceJpa.class).info(full_query);

  FullTextEntityManager fullTextEntityManager = Search
      .getFullTextEntityManager(manager);

  SearchFactory searchFactory = fullTextEntityManager.getSearchFactory();
  Query luceneQuery;

  // construct luceneQuery based on URL format
  QueryParser queryParser = new QueryParser(Version.LUCENE_36, "summary",
      searchFactory.getAnalyzer(MapRecordJpa.class));
  luceneQuery = queryParser.parse(full_query);

  org.hibernate.search.jpa.FullTextQuery ftquery = fullTextEntityManager
      .createFullTextQuery(luceneQuery, MapRecordJpa.class);

  // Sort Options -- in order of priority
  // (1) if a sort field is specified by pfs parameter, use it
  // (2) if a query has been specified, use nothing (lucene relevance
  // default)
  // (3) if a query has not been specified, sort by conceptId
  String sortField = "conceptId";
  if (pfsParameter != null && pfsParameter.getSortField() != null
      && !pfsParameter.getSortField().isEmpty()) {
    ftquery.setSort(new Sort(new SortField(pfsParameter.getSortField(),
        SortField.STRING)));
  } else if (pfsParameter != null
      && pfsParameter.getQueryRestriction() != null
      && !pfsParameter.getQueryRestriction().isEmpty()) {
    // do nothing
  } else {
    ftquery.setSort(new Sort(new SortField(sortField, SortField.STRING)));
  }

  // get the results; result size is computed before paging is applied
  int totalCount = ftquery.getResultSize();
  if (pfsParameter != null) {
    ftquery.setFirstResult(pfsParameter.getStartIndex());
    ftquery.setMaxResults(pfsParameter.getMaxResults());
  }
  List<MapRecord> mapRecords = ftquery.getResultList();
  Logger.getLogger(this.getClass()).debug(
      Integer.toString(mapRecords.size()) + " records retrieved");
  for (MapRecord mapRecord : mapRecords) {
    handleMapRecordLazyInitialization(mapRecord);
  }

  // set the total count
  MapRecordListJpa mapRecordList = new MapRecordListJpa();
  mapRecordList.setTotalCount(totalCount);

  // extract the required sublist of map records
  mapRecordList.setMapRecords(mapRecords);

  return mapRecordList;
}

/*
 * (non-Javadoc)
 *
 * @see
 * org.ihtsdo.otf.mapping.services.MappingService#getMapRecordsForMapProject
 * (java.lang.Long, org.ihtsdo.otf.mapping.helpers.PfsParameter)
 */
@Override
@SuppressWarnings("unchecked")
public MapRecordList getPublishedMapRecordsForMapProject(Long mapProjectId,
    PfsParameter pfsParameter) throws Exception {

  // construct basic query
  String full_query = constructMapRecordForMapProjectIdQuery(mapProjectId,
      pfsParameter == null ? new PfsParameterJpa() : pfsParameter);

  // restrict to PUBLISHED records only
  full_query += " AND workflowStatus:'PUBLISHED'";

  Logger.getLogger(MappingServiceJpa.class).info(full_query);

  FullTextEntityManager fullTextEntityManager = Search
      .getFullTextEntityManager(manager);

  SearchFactory searchFactory = fullTextEntityManager.getSearchFactory();
  Query luceneQuery;

  // construct luceneQuery based on URL format
  QueryParser queryParser = new QueryParser(Version.LUCENE_36, "summary",
      searchFactory.getAnalyzer(MapRecordJpa.class));
  luceneQuery = queryParser.parse(full_query);

  org.hibernate.search.jpa.FullTextQuery ftquery = fullTextEntityManager
      .createFullTextQuery(luceneQuery, MapRecordJpa.class);

  // Sort Options -- in order of priority
  // (1) if a sort field is specified by pfs parameter, use it
  // (2) if a query has been specified, use nothing (lucene relevance
  // default)
  // (3) if a query has not been specified, sort by conceptId
  String sortField = "conceptId";
  if (pfsParameter != null && pfsParameter.getSortField() != null
      && !pfsParameter.getSortField().isEmpty()) {
    ftquery.setSort(new Sort(new SortField(pfsParameter.getSortField(),
        SortField.STRING)));
  } else if (pfsParameter != null
      && pfsParameter.getQueryRestriction() != null
      && !pfsParameter.getQueryRestriction().isEmpty()) {
    // do nothing
  } else {
    ftquery.setSort(new Sort(new SortField(sortField, SortField.STRING)));
  }

  // get the results
// (continuation of getPublishedMapRecordsForMapProject)
// result size is computed before paging is applied
int totalCount = ftquery.getResultSize();
if (pfsParameter != null) {
  ftquery.setFirstResult(pfsParameter.getStartIndex());
  ftquery.setMaxResults(pfsParameter.getMaxResults());
}
List<MapRecord> mapRecords = ftquery.getResultList();
Logger.getLogger(this.getClass()).debug(
    Integer.toString(mapRecords.size()) + " records retrieved");
for (MapRecord mapRecord : mapRecords) {
  handleMapRecordLazyInitialization(mapRecord);
}

// set the total count
MapRecordListJpa mapRecordList = new MapRecordListJpa();
mapRecordList.setTotalCount(totalCount);

// extract the required sublist of map records
mapRecordList.setMapRecords(mapRecords);

return mapRecordList;
}

/**
 * Helper function for map record query construction using both fielded
 * terms and unfielded terms.
 *
 * @param mapProjectId the map project id for which queries are retrieved
 * @param pfsParameter the pfs parameter
 *          (NOTE(review): dereferenced before the null check below —
 *          callers currently always pass a non-null value; confirm)
 * @return the full lucene query text
 */
private static String constructMapRecordForMapProjectIdQuery(
    Long mapProjectId, PfsParameter pfsParameter) {

  String full_query;

  // if no filter supplied, return query based on map project id only
  if (pfsParameter.getQueryRestriction() == null
      || pfsParameter.getQueryRestriction().equals("")) {
    full_query = "mapProjectId:" + mapProjectId;
    return full_query;
  }

  // Pre-treatment: Find any lower-case boolean operators and set to
  // uppercase

  // Basic algorithm:
  //
  // 1) add whitespace breaks to operators
  // 2) split query on whitespace
  // 3) cycle over terms in split query to find quoted material, add each
  // term/quoted term to parsed terms\
  // a) special case: quoted term after a :
  // 3) cycle over terms in parsed terms
  // a) if an operator/parantheses, pass through unchanged (send to upper
  // case
  // for boolean)
  // b) if a fielded query (i.e. field:value), pass through unchanged
  // c) if not, construct query on all fields with this term

  // list of escape terms (i.e. quotes, operators) to be fed into query
  // untouched
  String escapeTerms = "\\+|\\-|\"|\\(|\\)";
  String booleanTerms = "and|AND|or|OR|not|NOT";

  // first cycle over the string to add artificial breaks before and after
  // control characters
  final String queryStr = (pfsParameter == null ? "" : pfsParameter
      .getQueryRestriction());

  String queryStr_mod = queryStr;
  queryStr_mod = queryStr_mod.replace("(", " ( ");
  queryStr_mod = queryStr_mod.replace(")", " ) ");
  queryStr_mod = queryStr_mod.replace("\"", " \" ");
  queryStr_mod = queryStr_mod.replace("+", " + ");
  queryStr_mod = queryStr_mod.replace("-", " - ");

  // remove any leading or trailing whitespace (otherwise first/last null
  // term
  // bug)
  queryStr_mod = queryStr_mod.trim();

  // split the string by white space and single-character operators
  String[] terms = queryStr_mod.split("\\s+");

  // merge items between quotation marks
  boolean exprInQuotes = false;
  List<String> parsedTerms = new ArrayList<>();
  // List<String> parsedTerms_temp = new ArrayList<String>();
  String currentTerm = "";

  // cycle over terms to identify quoted (i.e. non-parsed) terms
  for (int i = 0; i < terms.length; i++) {

    // if an open quote is detected
    if (terms[i].equals("\"")) {

      if (exprInQuotes == true) {

        // special case check: fielded term. Impossible for first
        // term to be
        // fielded.
        if (parsedTerms.size() == 0) {
          parsedTerms.add("\"" + currentTerm + "\"");
        } else {
          String lastParsedTerm = parsedTerms.get(parsedTerms.size() - 1);

          // if last parsed term ended with a colon, append this
          // term to the
          // last parsed term
          if (lastParsedTerm.endsWith(":") == true) {
            parsedTerms.set(parsedTerms.size() - 1, lastParsedTerm + "\""
                + currentTerm + "\"");
          } else {
            parsedTerms.add("\"" + currentTerm + "\"");
          }
        }

        // reset current term
        currentTerm = "";
        exprInQuotes = false;

      } else {
        exprInQuotes = true;
      }

      // if no quote detected
    } else {

      // if inside quotes, continue building term
      if (exprInQuotes == true) {
        currentTerm = currentTerm == "" ? terms[i] : currentTerm + " "
            + terms[i];

        // otherwise, add to parsed list
      } else {
        parsedTerms.add(terms[i]);
      }
    }
  }

  for (String s : parsedTerms) {
    Logger.getLogger(MappingServiceJpa.class).debug(" " + s);
  }

  // cycle over terms to construct query
  full_query = "";

  for (int i = 0; i < parsedTerms.size(); i++) {

    // if not the first term AND the last term was not an escape term
    // add whitespace separator
    if (i != 0 && !parsedTerms.get(i - 1).matches(escapeTerms)) {
      full_query += " ";
    }
    /*
     * full_query += (i == 0 ? // check for first term "" : // -> if
     * first character, add nothing
     * parsedTerms.get(i-1).matches(escapeTerms) ? // check if last term
     * was an escape character "": // -> if last term was an escape
     * character, add nothing " "); // -> otherwise, add a separating
     * space
     */

    // if an escape character/sequence, add this term unmodified
    if (parsedTerms.get(i).matches(escapeTerms)) {
      full_query += parsedTerms.get(i);

      // else if a boolean character, add this term in upper-case form
      // lucene format)
    } else if (parsedTerms.get(i).matches(booleanTerms)) {
      full_query += parsedTerms.get(i).toUpperCase();

      // else if already a field-specific query term, add this term
      // unmodified
    } else if (parsedTerms.get(i).contains(":")) {
      full_query += parsedTerms.get(i);

      // otherwise, treat as unfielded query term
    } else {

      // open parenthetical term
      full_query += "(";

      // add fielded query for each indexed term, separated by OR
      Iterator<String> names_iter = fieldNames.iterator();
      while (names_iter.hasNext()) {
        full_query += names_iter.next() + ":" + parsedTerms.get(i);
        if (names_iter.hasNext())
          full_query += " OR ";
      }

      // close parenthetical term
      full_query += ")";
    }

    // if further terms remain in the sequence
    if (!(i == parsedTerms.size() - 1)) {

      // Add a separating OR iff:
      // - this term is not an escape character
      // - this term is not a boolean term
      // - next term is not a boolean term
      if (!parsedTerms.get(i).matches(escapeTerms)
          && !parsedTerms.get(i).matches(booleanTerms)
          && !parsedTerms.get(i + 1).matches(booleanTerms)) {
        full_query += " OR";
      }
    }
  }

  // add parantheses and map project constraint
  full_query = "(" + full_query + ")" + " AND mapProjectId:" + mapProjectId;

  Logger.getLogger(MappingServiceJpa.class).debug(
      "Full query: " + full_query);

  return full_query;
}

/*
 * (non-Javadoc)
 *
 * @see
 * org.ihtsdo.otf.mapping.services.MappingService#findConceptsInScope(org.
 * ihtsdo.otf.mapping.model.MapProject)
 */
@Override
public SearchResultList findConceptsInScope(Long mapProjectId,
    PfsParameter pfsParameter) throws Exception {

  Logger.getLogger(this.getClass()).info(
      "Find concepts in scope for " + mapProjectId);

  MapProject project = getMapProject(mapProjectId);
  SearchResultList conceptsInScope = new SearchResultListJpa();

  ContentService contentService = new ContentServiceJpa();

  String terminology = project.getSourceTerminology();
  String terminologyVersion = project.getSourceTerminologyVersion();

  // Avoid including the scope concepts themselves in the definition
  // if we are looking for descendants
  // e.g. "Clinical Finding" does not need to be mapped for SNOMED->ICD10
  if (!project.isScopeDescendantsFlag()) {
    Logger.getLogger(this.getClass()).info(
        " Project not using scope descendants flag - "
            + project.getScopeConcepts());
    for (String conceptId : project.getScopeConcepts()) {
      Concept c = contentService.getConcept(conceptId, terminology,
          terminologyVersion);
      if (c == null) {
        throw new Exception("Scope concept " + conceptId
            + " does not exist.");
      }
      SearchResult sr = new SearchResultJpa();
      sr.setId(c.getId());
      sr.setTerminologyId(c.getTerminologyId());
      sr.setTerminology(c.getTerminology());
      sr.setTerminologyVersion(c.getTerminologyVersion());
      sr.setValue(c.getDefaultPreferredName());
      conceptsInScope.addSearchResult(sr);
    }
  }

  // Include descendants in scope.
// (continuation of findConceptsInScope)
if (project.isScopeDescendantsFlag()) {
  Logger.getLogger(this.getClass()).info(
      " Project using scope descendants flag");
  // for each scope concept, get descendants
  for (String terminologyId : project.getScopeConcepts()) {
    SearchResultList descendants = contentService.findDescendantConcepts(
        terminologyId, terminology, terminologyVersion, pfsParameter);

    Logger.getLogger(this.getClass()).info(
        " Concept " + terminologyId + " has "
            + descendants.getTotalCount() + " descendants ("
            + descendants.getCount() + " from getCount)");

    // cycle over descendants
    for (SearchResult sr : descendants.getSearchResults()) {
      conceptsInScope.addSearchResult(sr);
    }
  }
}

contentService.close();

// get those excluded from scope
SearchResultList excludedResultList = findConceptsExcludedFromScope(
    mapProjectId, pfsParameter);

// remove those excluded from scope
SearchResultList finalConceptsInScope = new SearchResultListJpa();
for (SearchResult sr : conceptsInScope.getSearchResults()) {
  if (!excludedResultList.contains(sr)) {
    finalConceptsInScope.addSearchResult(sr);
  }
}

finalConceptsInScope.setTotalCount(finalConceptsInScope.getCount());
Logger.getLogger(this.getClass()).info(
    "Finished getting scope concepts - "
        + finalConceptsInScope.getTotalCount());

/**
 * Debugging aid (commented out):
 * PrintWriter writer = new PrintWriter("C:/data/inScopeConcepts.txt",
 * "UTF-8"); for(SearchResult sr :
 * finalConceptsInScope.getSearchResults()) {
 * writer.println(sr.getTerminologyId()); } writer.close();
 */

return finalConceptsInScope;
}

/*
 * (non-Javadoc)
 *
 * @see
 * org.ihtsdo.otf.mapping.services.MappingService#findUnmappedConceptsInScope
 * (org.ihtsdo.otf.mapping.model.MapProject)
 */
@Override
public SearchResultList findUnmappedConceptsInScope(Long mapProjectId,
    PfsParameter pfsParameter) throws Exception {

  Logger.getLogger(this.getClass()).info(
      "Find unmapped concepts in scope for " + mapProjectId);

  // Get in scope concepts
  SearchResultList conceptsInScope = findConceptsInScope(mapProjectId,
      pfsParameter);
  Logger.getLogger(this.getClass()).info(
      " Project has " + conceptsInScope.getTotalCount()
          + " concepts in scope");

  // Look for concept ids that have publication records in the current
  // project
  Set<String> mappedConcepts = new HashSet<>();
  for (MapRecord mapRecord : getMapRecordsForMapProject(mapProjectId)
      .getIterable()) {
    if (mapRecord.getWorkflowStatus().equals(WorkflowStatus.PUBLISHED)
        || mapRecord.getWorkflowStatus().equals(
            WorkflowStatus.READY_FOR_PUBLICATION)) {
      mappedConcepts.add(mapRecord.getConceptId());
    }
  }

  // Keep any search results that do not have mapped concepts
  // NOTE(review): contentService is opened and closed here but never
  // otherwise used — confirm it can be dropped.
  ContentService contentService = new ContentServiceJpa();
  SearchResultList unmappedConceptsInScope = new SearchResultListJpa();
  for (SearchResult sr : conceptsInScope.getSearchResults()) {
    if (!mappedConcepts.contains(sr.getTerminologyId())) {
      unmappedConceptsInScope.addSearchResult(sr);
    }
  }
  unmappedConceptsInScope.setTotalCount(unmappedConceptsInScope.getCount());

  contentService.close();

  Logger.getLogger(this.getClass()).info(
      " Project has " + unmappedConceptsInScope.getTotalCount()
          + " unmapped concepts in scope");

  return unmappedConceptsInScope;
}

/*
 * (non-Javadoc)
 *
 * @see org.ihtsdo.otf.mapping.services.MappingService#
 * findMappedConceptsOutOfScopeBounds(java.lang.Long)
 */
@Override
public SearchResultList findMappedConceptsOutOfScopeBounds(
    Long mapProjectId, PfsParameter pfsParameter) throws Exception {

  SearchResultList mappedConceptsOutOfBounds = new SearchResultListJpa();
  MapProject project = getMapProject(mapProjectId);
  List<MapRecord> mapRecordList = getMapRecordsForMapProject(mapProjectId)
      .getMapRecords();
  ContentService contentService = new ContentServiceJpa();

  // collect a search result for every mapped concept outside scope bounds
  for (MapRecord record : mapRecordList) {
    Concept c = contentService.getConcept(record.getConceptId(),
        project.getSourceTerminology(),
        project.getSourceTerminologyVersion());
    if (isConceptOutOfScopeBounds(c.getTerminologyId(), mapProjectId)) {
      SearchResult sr = new SearchResultJpa();
      sr.setId(c.getId());
sr.setTerminologyId(c.getTerminologyId()); sr.setTerminology(c.getTerminology()); sr.setTerminologyVersion(c.getTerminologyVersion()); sr.setValue(c.getDefaultPreferredName()); mappedConceptsOutOfBounds.addSearchResult(sr); } } contentService.close(); return mappedConceptsOutOfBounds; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#findConceptsExcludedFromScope * (org.ihtsdo.otf.mapping.model.MapProject) */ @Override public SearchResultList findConceptsExcludedFromScope(Long mapProjectId, PfsParameter pfsParameter) throws Exception { SearchResultList conceptsExcludedFromScope = new SearchResultListJpa(); ContentService contentService = new ContentServiceJpa(); MapProject project = getMapProject(mapProjectId); String terminology = project.getSourceTerminology(); String terminologyVersion = project.getSourceTerminologyVersion(); // add specified excluded concepts for (String conceptId : project.getScopeExcludedConcepts()) { Concept c = contentService.getConcept(conceptId, terminology, terminologyVersion); if (c != null) { SearchResult sr = new SearchResultJpa(); sr.setId(c.getId()); sr.setTerminologyId(c.getTerminologyId()); sr.setTerminology(c.getTerminology()); sr.setTerminologyVersion(c.getTerminologyVersion()); sr.setValue(c.getDefaultPreferredName()); conceptsExcludedFromScope.addSearchResult(sr); } } // add descendant excluded concepts if indicated if (project.isScopeExcludedDescendantsFlag()) { // for each excluded scope concept, get descendants for (String terminologyId : project.getScopeExcludedConcepts()) { SearchResultList descendants = contentService .findDescendantConcepts(terminologyId, terminology, terminologyVersion, null); // cycle over descendants for (SearchResult sr : descendants.getSearchResults()) { conceptsExcludedFromScope.addSearchResult(sr); } } } contentService.close(); conceptsExcludedFromScope.setTotalCount(conceptsExcludedFromScope .getCount()); Logger.getLogger(this.getClass()).info( "Concepts excluded 
from scope " + +conceptsExcludedFromScope.getTotalCount()); return conceptsExcludedFromScope; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#isConceptInScope(org.ihtsdo * .otf.mapping.rf2.Concept, org.ihtsdo.otf.mapping.model.MapProject) */ @Override public boolean isConceptInScope(String conceptId, Long mapProjectId) throws Exception { MapProject project = getMapProject(mapProjectId); // if directly matches preset scope concept return true for (String c : project.getScopeConcepts()) { if (c.equals(conceptId)) return true; } // don't make contentService if no chance descendants meet conditions if (!project.isScopeDescendantsFlag() && !project.isScopeDescendantsFlag()) return false; ContentService contentService = new ContentServiceJpa(); for (TreePosition tp : contentService.getTreePositionsWithDescendants( conceptId, project.getSourceTerminology(), project.getSourceTerminologyVersion()).getIterable()) { String ancestorPath = tp.getAncestorPath(); if (project.isScopeExcludedDescendantsFlag() && ancestorPath.contains(conceptId)) { continue; } if (project.isScopeDescendantsFlag() && ancestorPath.contains(conceptId)) { contentService.close(); return true; } } contentService.close(); return false; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#isConceptExcludedFromScope * (org.ihtsdo.otf.mapping.rf2.Concept, * org.ihtsdo.otf.mapping.model.MapProject) */ @Override public boolean isConceptExcludedFromScope(String conceptId, Long mapProjectId) throws Exception { MapProject project = getMapProject(mapProjectId); // if directly matches preset scope concept return true for (String c : project.getScopeExcludedConcepts()) { if (c.equals(conceptId)) return true; } // don't make contentService if no chance descendants meet conditions if (!project.isScopeDescendantsFlag() && !project.isScopeDescendantsFlag()) return false; ContentService contentService = new ContentServiceJpa(); for (TreePosition tp : 
contentService.getTreePositionsWithDescendants( conceptId, project.getSourceTerminology(), project.getSourceTerminologyVersion()).getIterable()) { String ancestorPath = tp.getAncestorPath(); if (project.isScopeDescendantsFlag() && ancestorPath.contains(conceptId)) { continue; } if (project.isScopeExcludedDescendantsFlag() && ancestorPath.contains(conceptId)) { contentService.close(); return true; } } contentService.close(); return false; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#isConceptOutOfScopeBounds * (java.lang.String, java.lang.Long) */ @Override public boolean isConceptOutOfScopeBounds(String conceptId, Long mapProjectId) throws Exception { MapProject project = getMapProject(mapProjectId); // if directly matches preset scope concept return false for (String c : project.getScopeConcepts()) { if (c.equals(conceptId)) return false; } ContentService contentService = new ContentServiceJpa(); for (TreePosition tp : contentService.getTreePositionsWithDescendants( conceptId, project.getSourceTerminology(), project.getSourceTerminologyVersion()).getIterable()) { String ancestorPath = tp.getAncestorPath(); if (project.isScopeDescendantsFlag() && ancestorPath.contains(conceptId)) { return false; } } contentService.close(); return true; } /** * TODO: Is this used? Should require map project id * * Given a concept, returns a list of descendant concepts that have no * associated map record. * * @param terminologyId * the terminology id * @param terminology * the terminology * @param terminologyVersion * the terminology version * @param thresholdLlc * the maximum number of descendants a concept can have before it * is no longer considered a low-level concept (i.e. 
return an * empty list) * @return the list of unmapped descendant concepts * @throws Exception * the exception */ @Override public SearchResultList findUnmappedDescendantsForConcept( String terminologyId, String terminology, String terminologyVersion, int thresholdLlc, PfsParameter pfsParameter) throws Exception { SearchResultList unmappedDescendants = new SearchResultListJpa(); // get hierarchical rel MetadataService metadataService = new MetadataServiceJpa(); Map<String, String> hierarchicalRelationshipTypeMap = metadataService .getHierarchicalRelationshipTypes(terminology, terminologyVersion); if (hierarchicalRelationshipTypeMap.keySet().size() > 1) { throw new IllegalStateException( "Map project source terminology has too many hierarchical relationship types - " + terminology); } if (hierarchicalRelationshipTypeMap.keySet().size() < 1) { throw new IllegalStateException( "Map project source terminology has too few hierarchical relationship types - " + terminology); } // get descendants -- no pfsParameter, want all results ContentService contentService = new ContentServiceJpa(); SearchResultList descendants = contentService.findDescendantConcepts( terminologyId, terminology, terminologyVersion, null); // if number of descendants <= low-level concept threshold, treat as // high-level concept and report no unmapped if (descendants.getCount() <= thresholdLlc) { // cycle over descendants for (SearchResult sr : descendants.getSearchResults()) { // if descendant has no associated map records, add to list if (getMapRecordsForConcept(sr.getTerminologyId()) .getTotalCount() == 0) { unmappedDescendants.addSearchResult(sr); } } } // close managers contentService.close(); metadataService.close(); return unmappedDescendants; } // Services to be implemented // // Addition services /// /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#addMapPrinciple(org.ihtsdo * .otf.mapping.model.MapPrinciple) */ @Override public MapPrinciple 
addMapPrinciple(MapPrinciple mapPrinciple) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(mapPrinciple); tx.commit(); } else { manager.persist(mapPrinciple); } return mapPrinciple; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#addMapAdvice(org.ihtsdo. * otf.mapping.model.MapAdvice) */ @Override public MapAdvice addMapAdvice(MapAdvice mapAdvice) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(mapAdvice); tx.commit(); } else { manager.persist(mapAdvice); } return mapAdvice; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#addMapRelation(org.ihtsdo. * otf.mapping.model.MapRelation) */ @Override public MapRelation addMapRelation(MapRelation mapRelation) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(mapRelation); tx.commit(); } else { manager.persist(mapRelation); } return mapRelation; } // Update services /// /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#updateMapPrinciple(org * .ihtsdo .otf.mapping.model.MapPrinciple) */ @Override public void updateMapPrinciple(MapPrinciple mapPrinciple) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.merge(mapPrinciple); tx.commit(); // manager.close(); } else { manager.merge(mapPrinciple); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#updateMapAdvice(org.ihtsdo * .otf.mapping.model.MapAdvice) */ @Override public void updateMapAdvice(MapAdvice mapAdvice) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.merge(mapAdvice); tx.commit(); // manager.close(); } else { manager.merge(mapAdvice); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#updateMapRelation(org. 
* ihtsdo .otf.mapping.model.MapRelation) */ @Override public void updateMapRelation(MapRelation mapRelation) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.merge(mapRelation); tx.commit(); // manager.close(); } else { manager.merge(mapRelation); } } // Removal services /// /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#removeMapPrinciple(java * .lang.Long) */ @Override public void removeMapPrinciple(Long mapPrincipleId) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); MapPrinciple mp = manager.find(MapPrincipleJpa.class, mapPrincipleId); if (manager.contains(mp)) { manager.remove(mp); } else { manager.remove(manager.merge(mp)); } tx.commit(); } else { MapPrinciple mp = manager.find(MapPrincipleJpa.class, mapPrincipleId); if (manager.contains(mp)) { manager.remove(mp); } else { manager.remove(manager.merge(mp)); } } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#removeMapAdvice(java.lang * .Long) */ @Override public void removeMapAdvice(Long mapAdviceId) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); MapAdvice ma = manager.find(MapAdviceJpa.class, mapAdviceId); if (manager.contains(ma)) { manager.remove(ma); } else { manager.remove(manager.merge(ma)); } tx.commit(); } else { MapAdvice ma = manager.find(MapAdviceJpa.class, mapAdviceId); if (manager.contains(ma)) { manager.remove(ma); } else { manager.remove(manager.merge(ma)); } } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#removeMapRelation(java * .lang.Long) */ @Override public void removeMapRelation(Long mapRelationId) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); MapRelation ma = manager.find(MapRelationJpa.class, mapRelationId); if (manager.contains(ma)) { manager.remove(ma); } else { manager.remove(manager.merge(ma)); } tx.commit(); } else { MapRelation ma = 
manager.find(MapRelationJpa.class, mapRelationId); if (manager.contains(ma)) { manager.remove(ma); } else { manager.remove(manager.merge(ma)); } } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#getMapPrinciple(java.lang * .Long) */ @Override public MapPrinciple getMapPrinciple(Long id) throws Exception { javax.persistence.Query query = manager .createQuery("select m from MapPrincipleJpa m where id = :id"); query.setParameter("id", id); return (MapPrinciple) query.getSingleResult(); } /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService#getMapPrinciples() */ @SuppressWarnings("unchecked") @Override public MapPrincipleList getMapPrinciples() { List<MapPrinciple> mapPrinciples = new ArrayList<>(); javax.persistence.Query query = manager .createQuery("select m from MapPrincipleJpa m"); // Try query mapPrinciples = query.getResultList(); MapPrincipleListJpa mapPrincipleList = new MapPrincipleListJpa(); mapPrincipleList.setMapPrinciples(mapPrinciples); mapPrincipleList.setTotalCount(mapPrinciples.size()); return mapPrincipleList; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService#getMapAdvices() */ @SuppressWarnings("unchecked") @Override public MapAdviceList getMapAdvices() { List<MapAdvice> mapAdvices = new ArrayList<>(); javax.persistence.Query query = manager .createQuery("select m from MapAdviceJpa m"); // Try query mapAdvices = query.getResultList(); MapAdviceListJpa mapAdviceList = new MapAdviceListJpa(); mapAdviceList.setMapAdvices(mapAdvices); mapAdviceList.setTotalCount(mapAdvices.size()); return mapAdviceList; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService#getMapRelations() */ @SuppressWarnings("unchecked") @Override public MapRelationList getMapRelations() { List<MapRelation> mapRelations = new ArrayList<>(); javax.persistence.Query query = manager .createQuery("select m from MapRelationJpa m"); // Try query mapRelations = query.getResultList(); 
mapRelations = query.getResultList(); MapRelationListJpa mapRelationList = new MapRelationListJpa(); mapRelationList.setMapRelations(mapRelations); mapRelationList.setTotalCount(mapRelations.size()); return mapRelationList; } // / Services for Map Project Creation /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#createMapRecordsForMapProject * (java.lang.Long, org.ihtsdo.otf.mapping.helpers.WorkflowStatus) */ @Override public void createMapRecordsForMapProject(Long mapProjectId, WorkflowStatus workflowStatus) throws Exception { MapProject mapProject = getMapProject(mapProjectId); Logger.getLogger(MappingServiceJpa.class).info( "Called createMapRecordsForMapProject for project - " + mapProjectId + " workflowStatus - " + workflowStatus); if (!getTransactionPerOperation()) { throw new IllegalStateException( "The application must let the service manage transactions for this method"); } // retrieve all complex map ref set members for mapProject javax.persistence.Query query = manager .createQuery("select r from ComplexMapRefSetMemberJpa r " + "where r.refSetId = :refSetId order by r.concept.id, " + "r.mapBlock, r.mapGroup, r.mapPriority"); query.setParameter("refSetId", mapProject.getRefSetId()); List<ComplexMapRefSetMember> complexMapRefSetMembers = new ArrayList<>(); for (Object member : query.getResultList()) { ComplexMapRefSetMember refSetMember = (ComplexMapRefSetMember) member; complexMapRefSetMembers.add(refSetMember); } Logger.getLogger(MappingServiceJpa.class).warn( " " + complexMapRefSetMembers.size() + " complex map refset members found (some skipped)"); createMapRecordsForMapProject(mapProjectId, complexMapRefSetMembers, workflowStatus); } // ONLY FOR TESTING PURPOSES /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService# * removeMapRecordsForMapProjectId (java.lang.Long) */ @SuppressWarnings("unchecked") @Override public Long removeMapRecordsForMapProject(Long mapProjectId) { tx = manager.getTransaction(); 
int nRecords = 0; tx.begin(); List<MapRecord> records = manager .createQuery( "select m from MapRecordJpa m where m.mapProjectId = :mapProjectId") .setParameter("mapProjectId", mapProjectId).getResultList(); for (MapRecord record : records) { // delete notes for (MapNote note : record.getMapNotes()) { if (manager.contains(note)) { manager.remove(note); } else { manager.remove(manager.merge(note)); } } record.setMapNotes(null); // delete entries for (MapEntry entry : record.getMapEntries()) { // remove advices entry.setMapAdvices(null); // merge entry to remove principle/advice associations manager.merge(entry); // delete entry manager.remove(entry); nRecords++; } // remove principles record.setMapPrinciples(null); // merge record to remove principle associations manager.merge(record); // delete record manager.remove(record); } tx.commit(); Logger.getLogger(this.getClass()).debug( Integer.toString(nRecords) + " records deleted for map project id = " + mapProjectId); return new Long(nRecords); } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#createMapRecordsForMapProject * (java.lang.Long, java.util.List) */ @Override public void createMapRecordsForMapProject(Long mapProjectId, List<ComplexMapRefSetMember> complexMapRefSetMembers, WorkflowStatus workflowStatus) throws Exception { MapProject mapProject = getMapProject(mapProjectId); Logger.getLogger(MappingServiceJpa.class).debug( " Creating map records for map project - " + mapProject.getName() + ", assigning workflow status " + WorkflowStatus.PUBLISHED); // Verify application is letting the service manage transactions if (!getTransactionPerOperation()) { throw new IllegalStateException( "The application must let the service manage transactions for this method"); } // Setup content service ContentService contentService = new ContentServiceJpa(); // Get map relation id->name mapping MetadataService metadataService = new MetadataServiceJpa(); Map<String, String> relationIdNameMap = 
metadataService
        .getMapRelations(mapProject.getSourceTerminology(),
            mapProject.getSourceTerminologyVersion());
    Logger.getLogger(MappingServiceJpa.class).debug(
        " relationIdNameMap = " + relationIdNameMap);

    // use the map relation id->name mapping to construct a hash set of
    // MapRelations
    Map<String, MapRelation> mapRelationIdMap = new HashMap<>();
    for (MapRelation mapRelation : getMapRelations().getIterable()) {
      mapRelationIdMap.put(mapRelation.getTerminologyId(), mapRelation);
    }

    // exactly one hierarchical relationship type is required for the
    // source terminology
    Map<String, String> hierarchicalRelationshipTypeMap = metadataService
        .getHierarchicalRelationshipTypes(
            mapProject.getSourceTerminology(),
            mapProject.getSourceTerminologyVersion());
    if (hierarchicalRelationshipTypeMap.keySet().size() > 1) {
      throw new IllegalStateException(
          "Map project source terminology has too many hierarchical relationship types - "
              + mapProject.getSourceTerminology());
    }
    if (hierarchicalRelationshipTypeMap.keySet().size() < 1) {
      throw new IllegalStateException(
          "Map project source terminology has too few hierarchical relationship types - "
              + mapProject.getSourceTerminology());
    }

    // switch to service-managed (batched) transactions for the load;
    // the previous setting is restored on both success and failure paths
    boolean prevTransactionPerOperationSetting = getTransactionPerOperation();
    setTransactionPerOperation(false);
    beginTransaction();
    MapAdviceList mapAdvices = getMapAdvices();
    int mapPriorityCt = 0;
    int prevMapGroup = 0;
    try {
      // instantiate other local variables
      String prevConceptId = null;
      MapRecord mapRecord = null;
      int ct = 0;
      MapUser loaderUser = getMapUser("loader");

      if (loaderUser == null) {
        throw new Exception("Loader user could not be found");
      }

      for (ComplexMapRefSetMember refSetMember : complexMapRefSetMembers) {

        // Skip inactive cases
        if (!refSetMember.isActive()) {
          Logger.getLogger(MappingServiceJpa.class).debug(
              "Skipping refset member " + refSetMember.getTerminologyId());
          continue;
        }

        // Skip concept exclusion rules in all cases
        if (refSetMember.getMapRule().matches("IFA\\s\\d*\\s\\|.*\\s\\|")
            && !(refSetMember.getMapAdvice()
                .contains("MAP IS CONTEXT DEPENDENT FOR GENDER"))
            && !(refSetMember.getMapRule()
                .matches("IFA\\s\\d*\\s\\|\\s.*\\s\\|\\s[<>]"))) {
          Logger.getLogger(MappingServiceJpa.class).debug(
              " Skipping refset member exclusion rule "
                  + refSetMember.getTerminologyId());
          continue;
        }

        // retrieve the concept
        Logger.getLogger(MappingServiceJpa.class).debug(
            " Get refset member concept");
        Concept concept = refSetMember.getConcept();

        // if no concept for this ref set member, skip
        if (concept == null) {
          continue;
          /*
           * throw new NoResultException(
           * " Concept is unexpectedly missing for " +
           * refSetMember.getTerminologyId());
           */
        }

        // if different concept than previous ref set member, create new
        // mapRecord (members arrive ordered by concept id, so a change in
        // id marks a new record)
        if (!concept.getTerminologyId().equals(prevConceptId)) {
          Logger.getLogger(MappingServiceJpa.class).debug(
              " Creating map record for " + concept.getTerminologyId());

          mapPriorityCt = 0;
          prevMapGroup = 0;
          mapRecord = new MapRecordJpa();
          mapRecord.setConceptId(concept.getTerminologyId());
          mapRecord.setConceptName(concept.getDefaultPreferredName());
          mapRecord.setMapProjectId(mapProject.getId());

          // get the number of descendants - Need to optimize this
          // Need a tool to compute and save this for LLCs (e.g.
          // having < 11
          // descendants)
          // NOTE(review): pfsParameter is built here but never passed to
          // the tree-position query below -- presumably dead code; confirm.
          PfsParameter pfsParameter = new PfsParameterJpa();
          pfsParameter.setMaxResults(100);
          TreePositionList treePositionList = contentService
              .getTreePositionsWithDescendants(
                  concept.getTerminologyId(), concept.getTerminology(),
                  concept.getTerminologyVersion());
          long descCt = 0;
          if (treePositionList.getCount() > 0) {
            descCt = treePositionList.getTreePositions().get(0)
                .getDescendantCount();
          }
          mapRecord.setCountDescendantConcepts(descCt);
          Logger.getLogger(MappingServiceJpa.class).debug(
              " Computing descendant ct = "
                  + mapRecord.getCountDescendantConcepts());

          // set the previous concept to this concept
          prevConceptId = refSetMember.getConcept()
              .getTerminologyId();

          // set the owner and lastModifiedBy user fields to
          // loaderUser
          mapRecord.setOwner(loaderUser);
          mapRecord.setLastModifiedBy(loaderUser);

          // set the workflow status to published
          mapRecord.setWorkflowStatus(workflowStatus);

          // persist the record
          addMapRecord(mapRecord);

          // periodically commit and recycle the content service to bound
          // memory use during large loads
          if (++ct % 500 == 0) {
            Logger.getLogger(MappingServiceJpa.class).info(
                " " + ct + " records created");
            commit();
            beginTransaction();
            // For memory management, avoid keeping cache of tree
            // positions
            contentService.close();
            contentService = new ContentServiceJpa();
          }
        }

        // check if target is in desired terminology; if so, create
        // entry
        String targetName = null;

        if (!refSetMember.getMapTarget().equals("")) {
          Concept c = contentService.getConcept(
              refSetMember.getMapTarget(),
              mapProject.getDestinationTerminology(),
              mapProject.getDestinationTerminologyVersion());
          if (c == null) {
            targetName = "Target name could not be determined";
          } else {
            targetName = c.getDefaultPreferredName();
          }
          Logger.getLogger(this.getClass()).debug(
              " Setting target name " + targetName);
        }

        // Set map relation id as well from the cache
        // NOTE(review): relationIdNameMap is keyed by String; the lookup on
        // mapRelationIdMap below needs .toString(), so if getMapRelationId()
        // is not a String this get() can never hit -- confirm.
        String relationName = null;
        if (refSetMember.getMapRelationId() != null) {
          relationName = relationIdNameMap.get(refSetMember
              .getMapRelationId());
          Logger.getLogger(this.getClass()).debug(
              " Look up relation name = "
                  + relationName);
        }

        Logger.getLogger(this.getClass()).debug(
            " Create map entry");
        MapEntry mapEntry = new MapEntryJpa();
        mapEntry.setTargetId(refSetMember.getMapTarget());
        mapEntry.setTargetName(targetName);
        mapEntry.setMapRecord(mapRecord);
        mapEntry.setMapRelation(mapRelationIdMap.get(refSetMember
            .getMapRelationId().toString()));
        String rule = refSetMember.getMapRule();
        if (rule.equals("OTHERWISE TRUE"))
          rule = "TRUE";
        mapEntry.setRule(rule);
        mapEntry.setMapBlock(refSetMember.getMapBlock());
        mapEntry.setMapGroup(refSetMember.getMapGroup());
        // map priority restarts at 1 within each map group
        if (prevMapGroup != refSetMember.getMapGroup()) {
          mapPriorityCt = 0;
          prevMapGroup = refSetMember.getMapGroup();
        }
        // Increment map priority as we go through records
        mapEntry.setMapPriority(++mapPriorityCt);

        mapRecord.addMapEntry(mapEntry);

        // Add support for advices - and there can be multiple map
        // advice values
        // Only add advice if it is an allowable value and doesn't match
        // relation name
        // This should automatically exclude IFA/ALWAYS advice
        Logger.getLogger(this.getClass()).debug(
            " Setting map advice");
        if (refSetMember.getMapAdvice() != null
            && !refSetMember.getMapAdvice().equals("")) {
          for (MapAdvice ma : mapAdvices.getIterable()) {
            if (refSetMember.getMapAdvice().indexOf(ma.getName()) != -1
                && !ma.getName().equals(relationName)) {
              mapEntry.addMapAdvice(ma);
              Logger.getLogger(this.getClass()).debug(
                  " " + ma.getName());
            }
          }
        }
      }

      commit();
      contentService.close();
      metadataService.close();
    } catch (Exception e) {
      // restore the caller's transaction mode before propagating
      setTransactionPerOperation(prevTransactionPerOperationSetting);
      throw e;
    }
    setTransactionPerOperation(prevTransactionPerOperationSetting);
  }

  /*
   * (non-Javadoc)
   * 
   * @see
   * org.ihtsdo.otf.mapping.services.MappingService#getTransactionPerOperation
   * ()
   */
  @Override
  public boolean getTransactionPerOperation() {
    return transactionPerOperation;
  }

  /*
   * (non-Javadoc)
   * 
   * @see
   * org.ihtsdo.otf.mapping.services.MappingService#setTransactionPerOperation
   * (boolean)
   */
  @Override
  public void
setTransactionPerOperation(boolean transactionPerOperation) { this.transactionPerOperation = transactionPerOperation; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService#beginTransaction() */ @Override public void beginTransaction() { if (getTransactionPerOperation()) throw new IllegalStateException( "Error attempting to begin a transaction when using transactions per operation mode."); else if (tx != null && tx.isActive()) throw new IllegalStateException( "Error attempting to begin a transaction when there " + "is already an active transaction"); tx = manager.getTransaction(); tx.begin(); } /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService#commit() */ @Override public void commit() { if (getTransactionPerOperation()) throw new IllegalStateException( "Error attempting to commit a transaction when using transactions per operation mode."); else if (tx != null && !tx.isActive()) throw new IllegalStateException( "Error attempting to commit a transaction when there " + "is no active transaction"); tx.commit(); } // AGE RANGE FUNCTIONS /* * (non-Javadoc) * * @see org.ihtsdo.otf.mapping.services.MappingService#getMapAgeRanges() */ @Override @SuppressWarnings("unchecked") public MapAgeRangeList getMapAgeRanges() { // construct query javax.persistence.Query query = manager .createQuery("select m from MapAgeRangeJpa m"); MapAgeRangeList m = new MapAgeRangeListJpa(); m.setMapAgeRanges(query.getResultList()); return m; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#addMapAgeRange(org.ihtsdo * .otf.mapping.model.MapAgeRange) */ @Override public MapAgeRange addMapAgeRange(MapAgeRange mapAgeRange) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(mapAgeRange); tx.commit(); } else { manager.persist(mapAgeRange); } return mapAgeRange; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#removeMapAgeRange(java * .lang.Long) */ 
@Override public void removeMapAgeRange(Long mapAgeRangeId) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); MapAgeRange mar = manager.find(MapAgeRangeJpa.class, mapAgeRangeId); if (manager.contains(mar)) { manager.remove(mar); } else { manager.remove(manager.merge(mar)); } tx.commit(); } else { MapAgeRange mar = manager.find(MapAgeRangeJpa.class, mapAgeRangeId); if (manager.contains(mar)) { manager.remove(mar); } else { manager.remove(manager.merge(mar)); } } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#updateMapAgeRange(org. * ihtsdo .otf.mapping.model.MapAgeRange) */ @Override public void updateMapAgeRange(MapAgeRange mapAgeRange) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.merge(mapAgeRange); tx.commit(); } else { manager.merge(mapAgeRange); } } // MAP USER PREFERENCES FUNCTIONS @Override public MapUserPreferences getMapUserPreferences(String userName) throws Exception { Logger.getLogger(MappingServiceJpa.class).info( "Finding user " + userName); MapUser mapUser = getMapUser(userName); javax.persistence.Query query = manager .createQuery( "select m from MapUserPreferencesJpa m where mapUser_id = :mapUser_id") .setParameter("mapUser_id", mapUser.getId()); MapUserPreferences m; try { m = (MapUserPreferences) query.getSingleResult(); } // catch no result exception and create default user preferences catch (NoResultException e) { // create object m = new MapUserPreferencesJpa(); m.setMapUser(mapUser); // set the map user MapProjectList mapProjects = getMapProjects(); m.setLastMapProjectId(mapProjects.getIterable().iterator().next() .getId()); // set a default project to 1st project found // add object addMapUserPreferences(m); } // return preferences return m; } /** * Retrieve all map user preferences * * @return a List of MapUserPreferencess */ @Override @SuppressWarnings("unchecked") public MapUserPreferencesList getMapUserPreferences() { 
List<MapUserPreferences> m = null; // construct query javax.persistence.Query query = manager .createQuery("select m from MapUserPreferencesJpa m"); // Try query m = query.getResultList(); MapUserPreferencesListJpa mapUserPreferencesList = new MapUserPreferencesListJpa(); mapUserPreferencesList.setMapUserPreferences(m); mapUserPreferencesList.setTotalCount(m.size()); return mapUserPreferencesList; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#addMapUserPreferences( * org.ihtsdo.otf.mapping.model.MapUserPreferences) */ @Override public MapUserPreferences addMapUserPreferences( MapUserPreferences mapUserPreferences) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(mapUserPreferences); tx.commit(); } else { manager.persist(mapUserPreferences); } return mapUserPreferences; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#removeMapUserPreferences * (java.lang.Long) */ @Override public void removeMapUserPreferences(Long mapUserPreferencesId) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); MapUserPreferences mar = manager.find(MapUserPreferencesJpa.class, mapUserPreferencesId); if (manager.contains(mar)) { manager.remove(mar); } else { manager.remove(manager.merge(mar)); } tx.commit(); } else { MapUserPreferences mar = manager.find(MapUserPreferencesJpa.class, mapUserPreferencesId); if (manager.contains(mar)) { manager.remove(mar); } else { manager.remove(manager.merge(mar)); } } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#updateMapUserPreferences * (org.ihtsdo .otf.mapping.model.MapUserPreferences) */ @Override public void updateMapUserPreferences(MapUserPreferences mapUserPreferences) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.merge(mapUserPreferences); tx.commit(); } else { manager.merge(mapUserPreferences); } } // USER ERROR FUNCTIONS /** * Adds 
the user error. * * @param userError * the user error * @return the user error */ @Override public UserError addUserError(UserError userError) { if (getTransactionPerOperation()) { tx = manager.getTransaction(); tx.begin(); manager.persist(userError); tx.commit(); } else { manager.persist(userError); } return userError; } @Override @SuppressWarnings("unchecked") public UserErrorList getUserErrors() { List<UserError> userErrors = null; // construct query javax.persistence.Query query = manager .createQuery("select m from UserErrorJpa m"); // Try query userErrors = query.getResultList(); UserErrorListJpa userErrorList = new UserErrorListJpa(); userErrorList.setUserErrors(userErrors); userErrorList.setTotalCount(userErrors.size()); return userErrorList; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#getMapUserRole(java.lang * .String, java.lang.Long) */ @Override public MapUserRole getMapUserRoleForMapProject(String userName, Long mapProjectId) throws Exception { Logger.getLogger(MappingServiceJpa.class).info( "Finding user's role " + userName + " " + mapProjectId); // get the user and map project for parameters MapUser mapUser = getMapUser(userName); MapProject mapProject = getMapProject(mapProjectId); // check which collection this user belongs to for this project if (mapProject.getMapAdministrators().contains(mapUser)) { return MapUserRole.ADMINISTRATOR; } else if (mapProject.getMapLeads().contains(mapUser)) { return MapUserRole.LEAD; } else if (mapProject.getMapSpecialists().contains(mapUser)) { return MapUserRole.SPECIALIST; } // default role is Viewer return MapUserRole.VIEWER; } @Override @XmlTransient public ProjectSpecificAlgorithmHandler getProjectSpecificAlgorithmHandler( MapProject mapProject) throws InstantiationException, IllegalAccessException, ClassNotFoundException { ProjectSpecificAlgorithmHandler algorithmHandler = (ProjectSpecificAlgorithmHandler) Class .forName(mapProject.getProjectSpecificAlgorithmHandlerClass()) 
.newInstance(); algorithmHandler.setMapProject(mapProject); return algorithmHandler; } /** * Sets the valid field for tree positions, given a map project id. * * @param treePositions * the tree positions * @param mapProjectId * the map project id * @return the revised list of tree positions * @throws Exception * the exception */ @Override public TreePositionList setTreePositionValidCodes( List<TreePosition> treePositions, Long mapProjectId) throws Exception { Logger.getLogger(MappingServiceJpa.class).info( "Setting tree position valid codes"); // get the map project and its algorithm handler MapProject mapProject = getMapProject(mapProjectId); ProjectSpecificAlgorithmHandler algorithmHandler = getProjectSpecificAlgorithmHandler(mapProject); setTreePositionValidCodesHelper(treePositions, algorithmHandler); TreePositionListJpa treePositionList = new TreePositionListJpa(); treePositionList.setTreePositions(treePositions); treePositionList.setTotalCount(treePositions.size()); return treePositionList; } /** * Helper function to recursively cycle over nodes and their children. * Instantiated to prevent necessity for retrieving algorithm handler at * each level. 
Note: Not necessary to return objects, tree positions are * persisted objects * * @param treePositions * the tree positions * @param algorithmHandler * the algorithm handler * @throws Exception * the exception */ public void setTreePositionValidCodesHelper( List<TreePosition> treePositions, ProjectSpecificAlgorithmHandler algorithmHandler) throws Exception { // cycle over all tree positions and check target code, recursively // cycle over children for (TreePosition tp : treePositions) { tp.setValid(algorithmHandler.isTargetCodeValid(tp .getTerminologyId())); setTreePositionValidCodesHelper(tp.getChildren(), algorithmHandler); } } @Override public TreePositionList setTreePositionTerminologyNotes( List<TreePosition> treePositions, Long mapProjectId) throws Exception { Logger.getLogger(MappingServiceJpa.class).info( "Setting tree position terminology notes"); // get the map project and its algorithm handler MapProject mapProject = getMapProject(mapProjectId); ProjectSpecificAlgorithmHandler algorithmHandler = getProjectSpecificAlgorithmHandler(mapProject); // construct the tree position list TreePositionListJpa treePositionList = new TreePositionListJpa(); treePositionList.setTreePositions(treePositions); treePositionList.setTotalCount(treePositions.size()); // compute the target terminology notes algorithmHandler.computeTargetTerminologyNotes(treePositionList); return treePositionList; } @SuppressWarnings("unused") @Override public MapRecordList getOriginMapRecordsForConflict(Long mapRecordId) throws Exception { Logger.getLogger(MappingServiceJpa.class).info( "getRecordsInConflict with record id = " + mapRecordId.toString()); MapRecordList conflictRecords = new MapRecordListJpa(); MapRecord mapRecord = getMapRecord(mapRecordId); MapProject mapProject = getMapProject(mapRecord.getMapProjectId()); if (mapRecord == null) throw new Exception( "getRecordsInConflict: Could not find map record with id = " + mapRecordId.toString() + "!"); // if a conflict between two 
specialists, retrieve the CONFLICT_DETECTED // records if (mapRecord.getWorkflowStatus().equals(WorkflowStatus.CONFLICT_NEW) || mapRecord.getWorkflowStatus().equals( WorkflowStatus.CONFLICT_IN_PROGRESS)) { // TODO As with review record below, this try/catch block is a // temporary fix for situations where origiinId list is greater than // and where records listed are no longer in the database (e.g. in // audit history) try { for (Long originId : mapRecord.getOriginIds()) { MapRecord mr = getMapRecord(originId); if (mr.getWorkflowStatus().equals( WorkflowStatus.CONFLICT_DETECTED)) { conflictRecords.addMapRecord(getMapRecord(originId)); } } if (conflictRecords.getCount() == 2) { conflictRecords.setTotalCount(conflictRecords.getCount()); return conflictRecords; } } catch (Exception e) { // do nothing } } else if ((mapProject.getWorkflowType().equals("CONFLICT_PROJECT") && (mapRecord .getWorkflowStatus().equals(WorkflowStatus.REVIEW_NEW) || mapRecord .getWorkflowStatus().equals(WorkflowStatus.REVIEW_IN_PROGRESS))) || (mapProject.getWorkflowType().equals("REVIEW_PROJECT") && mapRecord .getOriginIds().size() > 2)) { boolean foundReviewRecord = false; // the specialist's completed // work boolean foundRevisionRecord = false; // the original published work for (Long originId : mapRecord.getOriginIds()) { System.out.println("Getting origin id: " + originId); MapRecord mr = getMapRecord(originId); // TODO This try/cactch block is here to prevent a problem where // a REVIEW record // has been completed, then sent down FIX_ERROR_PATH again, then // reviewed again, // causing origin ids to contain record references that no // longer exist try { if (mr.getWorkflowStatus().equals( WorkflowStatus.REVIEW_NEEDED)) { conflictRecords.addMapRecord(getMapRecord(originId)); foundReviewRecord = true; } else if (mr.getWorkflowStatus().equals( WorkflowStatus.REVISION)) { conflictRecords.addMapRecord(getMapRecord(originId)); foundRevisionRecord = true; } } catch (Exception e) { // do nothing 
} // once records are found, stop processing origin ids if (foundReviewRecord == true && foundRevisionRecord == true) { conflictRecords.setTotalCount(conflictRecords.getCount()); return conflictRecords; } } } else if (mapProject.getWorkflowType().equals("REVIEW_PROJECT") && mapRecord.getWorkflowStatus().equals( WorkflowStatus.REVIEW_NEW) || mapRecord.getWorkflowStatus().equals( WorkflowStatus.REVIEW_IN_PROGRESS)) { System.out.println("Getting origin id for REVIEW_PROJECT record"); WorkflowService workflowService = new WorkflowServiceJpa(); TrackingRecord tr = workflowService.getTrackingRecordForMapProjectAndConcept(mapProject, mapRecord.getConceptId()); if (tr.getWorkflowPath().equals(WorkflowPath.REVIEW_PROJECT_PATH)) { for (Long originId : mapRecord.getOriginIds()) { try { MapRecord mr = getMapRecord(mapRecord.getOriginIds().iterator() .next()); // check assumption if (!mr.getWorkflowStatus().equals(WorkflowStatus.REVIEW_NEEDED)) { throw new Exception( "Single origin record found for review, but was not REVIEW_NEEDED"); } conflictRecords.addMapRecord(mr); conflictRecords.setTotalCount(conflictRecords.getCount()); return conflictRecords; } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } } else if (tr.getWorkflowPath().equals(WorkflowPath.FIX_ERROR_PATH)) { boolean foundReviewRecord = false; // the specialist's completed // work boolean foundRevisionRecord = false; // the original published // work for (Long originId : mapRecord.getOriginIds()) { System.out.println("Getting origin id: " + originId); MapRecord mr = getMapRecord(originId); // TODO This try/cactch block is here to prevent a problem // where // a REVIEW record // has been completed, then sent down FIX_ERROR_PATH again, // then // reviewed again, // causing origin ids to contain record references that no // longer exist try { if (mr.getWorkflowStatus().equals( WorkflowStatus.REVIEW_NEEDED)) { conflictRecords .addMapRecord(getMapRecord(originId)); foundReviewRecord = 
true; } else if (mr.getWorkflowStatus().equals( WorkflowStatus.REVISION)) { conflictRecords .addMapRecord(getMapRecord(originId)); foundRevisionRecord = true; } } catch (Exception e) { // do nothing, attempted to find a record that no longer exists } // once records are found, stop processing origin ids if (foundReviewRecord == true && foundRevisionRecord == true) { conflictRecords.setTotalCount(conflictRecords .getCount()); return conflictRecords; } } } else { throw new Exception( "Could not retrieve exactly one origin id for REVIEW_PROJECT path"); } } else { throw new Exception( "Invalid map record passed to conflict origins routine"); } return conflictRecords; } /** * Validate that a single user cannot have more than one role on a * particular map project. * * @param mapProject * the map project * @throws Exception * the exception */ private void validateUserAndRole(MapProject mapProject) throws Exception { Map<MapUser, String> userToRoleMap = new HashMap<>(); for (MapUser user : mapProject.getMapLeads()) { // if user is already in map, throw exception if (userToRoleMap.containsKey(user)) throw new IllegalStateException("Error: User " + user.getName() + " has more than one role."); else userToRoleMap.put(user, "lead"); } for (MapUser user : mapProject.getMapSpecialists()) { // if user is already in map, throw exception if (userToRoleMap.containsKey(user)) throw new IllegalStateException("Error: User " + user.getName() + " has more than one role."); else userToRoleMap.put(user, "specialist"); } for (MapUser user : mapProject.getMapAdministrators()) { // if user is already in map, throw exception if (userToRoleMap.containsKey(user)) throw new IllegalStateException("Error: User " + user.getName() + " has more than one role."); else userToRoleMap.put(user, "administrator"); } } /** * Handle map record lazy initialization. 
* * @param mapRecord * the map record */ private void handleMapRecordLazyInitialization(MapRecord mapRecord) { // handle all lazy initializations mapRecord.getOwner().getEmail(); mapRecord.getLastModifiedBy().getEmail(); mapRecord.getMapNotes().size(); mapRecord.getMapPrinciples().size(); mapRecord.getOriginIds().size(); for (MapEntry mapEntry : mapRecord.getMapEntries()) { if (mapEntry.getMapRelation() != null) mapEntry.getMapRelation().getName(); mapEntry.getMapAdvices().size(); } } /** * Handle map project lazy initialization. * * @param mapProject * the map project */ private void handleMapProjectLazyInitialization(MapProject mapProject) { // handle all lazy initializations mapProject.getScopeConcepts().size(); mapProject.getScopeExcludedConcepts().size(); mapProject.getMapAdvices().size(); mapProject.getMapRelations().size(); mapProject.getMapLeads().size(); mapProject.getMapSpecialists().size(); mapProject.getMapPrinciples().size(); mapProject.getPresetAgeRanges().size(); } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.services.MappingService#checkMapGroupsForMapProject * (org.ihtsdo.otf.mapping.model.MapProject) */ @Override public void checkMapGroupsForMapProject(MapProject mapProject, boolean updateRecords) throws Exception { Logger.getLogger(MappingServiceJpa.class).info( "Checking map group numbering for project " + mapProject.getName()); Logger.getLogger(MappingServiceJpa.class).info( " Mode: " + (updateRecords ? 
"Update" : "Check")); MapRecordList mapRecordsInProject = this .getMapRecordsForMapProject(mapProject.getId()); // detach all these records to prevent null-pointer exceptions for (MapRecord mr : mapRecordsInProject.getIterable()) manager.detach(mr); Logger.getLogger(MappingServiceJpa.class).info( "Checking " + mapRecordsInProject.getCount() + " map records."); // logging variables int nRecordsChecked = 0; int nRecordsRemapped = 0; int nMessageInterval = (int) Math .floor(mapRecordsInProject.getCount() / 10); // instantiate the algorithm handler ProjectSpecificAlgorithmHandler algorithmHandler = this.getProjectSpecificAlgorithmHandler(mapProject); // instantiate the services ContentService contentService = new ContentServiceJpa(); WorkflowService workflowService = new WorkflowServiceJpa(); // cycle over all records for (MapRecord mapRecord : mapRecordsInProject.getIterable()) { // create a map representing oldGroup -> newGroup List<Integer> mapGroupsFound = new ArrayList<>(); // map of remappings Map<Integer, Integer> mapGroupRemapping = new HashMap<>(); // find the existing groups for (MapEntry mapEntry : mapRecord.getMapEntries()) { // if this group not already present, add to list if (!mapGroupsFound.contains(mapEntry.getMapGroup())) mapGroupsFound.add(mapEntry.getMapGroup()); } // sort the groups found Collections.sort(mapGroupsFound); // get the total number of groups present int nMapGroups = mapGroupsFound.size(); // if no groups at all, skip this record if (nMapGroups > 0) { // flag for whether map record needs to be modified boolean mapGroupsRemapped = false; // shorthand the min/max values int minGroup = Collections.min(mapGroupsFound); int maxGroup = Collections.max(mapGroupsFound); // if the max group is not equal to the number of groups // or the min group is not equal to 1 if (maxGroup != nMapGroups || minGroup != 1) { mapGroupsRemapped = true; // counter for groups int cumMissingGroups = 0; // cycle over all group values from 0 to max group for (int i 
= 0; i <= maxGroup; i++) { // if this group present, // - remove the group from set // - subtract current value by the cumulative number of // missed groups found // - add 1 and subtract the value of the min group // - re-add the new remapped group // otherwise // - increment the missing group counter // e.g. (0, 3, 5) goes through the following steps: // 0 -> 0 - 0 + 1 - 0 = 1 -> map as (0, 1) // 1 -> not present, increment offset // 2 -> not present, increment offset // 3 -> 3 - 2 + 1 - 0 = 2 -> map as (3, 2) // 4 -> not present, increment offset // 5 -> 5 - 3 + 1 - 0 = 3 -> map as (5, 3) if (mapGroupsFound.contains(i)) { mapGroupRemapping.put(i, i - cumMissingGroups + 1 - minGroup); } else { cumMissingGroups++; } } } // if errors detected, log if (mapGroupsRemapped == true) { nRecordsRemapped++; Logger.getLogger(MappingServiceJpa.class).info( "Remapping record " + mapRecord.getId() + ": " + mapRecord.getConceptId() + ", " + mapRecord.getConceptName()); String mapLogStr = ""; for (Integer i : mapGroupRemapping.keySet()) { mapLogStr += " " + i + "->" + mapGroupRemapping.get(i); } Logger.getLogger(MappingServiceJpa.class).info( " Remapping: " + mapLogStr); } // if errors detected and update mode specified, update if (mapGroupsRemapped == true && updateRecords == true) { for (MapEntry me : mapRecord.getMapEntries()) { if (mapGroupRemapping.containsKey(me.getMapGroup())) { me.setMapGroup(mapGroupRemapping.get(me.getMapGroup())); } } // check if this record still exists in database (i.e. 
has not been removed) // only known situation where this should occur is if a lead has saved a // conflict resolution record, but a specialist's remapped record results in // no conflict and publication, causing the lead's record to disappear MapRecord mapRecordInDatabase = this.getMapRecord(mapRecord.getId()); if (mapRecordInDatabase == null) { Logger.getLogger(MappingServiceJpa.class).warn("Map Record " + mapRecord.getId() + " no longer exists"); } else { // remerge the record manager.merge(mapRecord); // get the concept Concept concept = contentService.getConcept( mapRecord.getConceptId(), mapProject.getSourceTerminology(), mapProject.getSourceTerminologyVersion()); try { // process workflow action depending on current status switch (mapRecord.getWorkflowStatus()) { // re-finish all records in a completed state case EDITING_DONE: case CONFLICT_DETECTED: case REVIEW_NEEDED: case CONSENSUS_NEEDED: Logger.getLogger(MappingServiceJpa.class).warn("Finishing record, id = " + mapRecord.getId() + ", workflow status = " + mapRecord.getWorkflowStatus()); workflowService.processWorkflowAction(mapProject, concept, mapRecord.getOwner(), mapRecord, WorkflowAction.FINISH_EDITING); break; // actions requiring Save For Later case CONFLICT_IN_PROGRESS: case CONSENSUS_IN_PROGRESS: case EDITING_IN_PROGRESS: case REVIEW_IN_PROGRESS: Logger.getLogger(MappingServiceJpa.class).warn("Savng record for later, id = " + mapRecord.getId() + ", workflow status = " + mapRecord.getWorkflowStatus()); workflowService.processWorkflowAction(mapProject, concept, mapRecord.getOwner(), mapRecord, WorkflowAction.SAVE_FOR_LATER); break; // qa situations outside the workflow (i.e. 
published material), simple database update case READY_FOR_PUBLICATION: case PUBLISHED: case REVISION: this.updateMapRecord(mapRecord); Logger.getLogger(MappingServiceJpa.class).warn("Updating record outside the workflow: id = " + mapRecord.getId() + ", workflow status=" + mapRecord.getWorkflowStatus()); break; // workflow statuses that should not even have entries, do nothing and output a warning case NEW: case REVIEW_NEW: case CONFLICT_NEW: case CONSENSUS_NEW: default: Logger.getLogger(MappingServiceJpa.class).error("Record has erroneous workflow state: id = " + mapRecord.getId() + ", workflow status=" + mapRecord.getWorkflowStatus()); break; } } catch (Exception e) { Logger.getLogger(MappingServiceJpa.class).error("Error processing record for concept id = " + concept.getTerminologyId()); e.printStackTrace(); } } } // output logging information if (++nRecordsChecked % nMessageInterval == 0) { Logger.getLogger(MappingServiceJpa.class).info( " " + nRecordsChecked + " records processed (" + (nRecordsChecked / nMessageInterval * 10) + "%), " + nRecordsRemapped + " with group errors"); } } } Logger.getLogger(MappingServiceJpa.class).info( " " + nRecordsChecked + " total records processed (" + nRecordsRemapped + " with group errors"); } }
package com.blackboard.testing.lambda;

import static com.blackboard.testing.lambda.logger.LoggerContainer.LOGGER;
import static java.util.Optional.ofNullable;

import com.blackboard.testing.common.LambdaBaseTest;
import com.blackboard.testing.runner.ParallelParameterized;
import com.blackboard.testing.testcontext.TestUUID;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.junit.runner.RunWith;
import org.junit.runner.manipulation.Filter;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.InitializationError;
import org.reflections.Reflections;

/**
 * Suite that discovers {@link LambdaBaseTest} subclasses on the classpath and turns their test
 * methods into {@link TestRequest}s, plus helpers for persisting attachments and logging results.
 */
@RunWith(ParallelParameterized.class)
public class LambdaTestSuite {

    /**
     * Scans the given package for test classes.
     *
     * @param folderName the package to scan
     * @return all subclasses of {@link LambdaBaseTest} found in the package
     */
    private static List<Class<?>> getTestClasses(String folderName) {
        // fixed raw type: List<Class> -> List<Class<?>>; every
        // Class<? extends LambdaBaseTest> is assignable to Class<?>
        Reflections reflections = new Reflections(folderName);
        Set<Class<? extends LambdaBaseTest>> allClasses =
                reflections.getSubTypesOf(LambdaBaseTest.class);
        return new ArrayList<>(allClasses);
    }

    /**
     * Builds one {@link TestRequest} per runnable test method found in the given package.
     *
     * @param folderName the package to scan
     * @param filter JUnit filter deciding which test descriptions are included
     * @return the matching test requests, tagged with the current test-run UUID
     */
    protected static List<TestRequest> getTestRequests(String folderName, Filter filter) {
        List<TestRequest> requests = new ArrayList<>();
        getTestClasses(folderName).forEach(testClass -> {
            try {
                new BlockJUnit4ClassRunner(testClass).getDescription().getChildren()
                        .forEach(description -> {
                            if (filter.shouldRun(description)) {
                                TestRequest request = new TestRequest(description);
                                request.setTestRunUUID(TestUUID.getTestUUID());
                                requests.add(request);
                            }
                        });
            } catch (InitializationError e) {
                // class is not runnable as a JUnit test; log and continue with the rest
                LOGGER.log(e);
            }
        });
        return requests;
    }

    /**
     * Writes returned attachments (e.g. screenshots) under build/screenshots.
     *
     * @param attachments map of file name to raw bytes
     */
    protected void writeAttachments(Map<String, byte[]> attachments) {
        File outputDirectory = new File(System.getProperty("user.dir") + "/build/screenshots/");
        // mkdirs() returns false both on failure and when the directory already
        // exists, so only treat "missing after the call" as an error (previously
        // the result was silently ignored)
        if (!outputDirectory.mkdirs() && !outputDirectory.isDirectory()) {
            LOGGER.log("Could not create attachment directory %s", outputDirectory);
            return;
        }
        attachments.forEach((fileName, bytes) -> {
            try {
                FileUtils.writeByteArrayToFile(new File(outputDirectory, fileName), bytes);
            } catch (IOException e) {
                LOGGER.log(e);
            }
        });
    }

    /**
     * Logs the completion of a single test, including its failure cause if any.
     *
     * @param request the executed test request
     * @param result the result returned for it
     */
    protected void logTestResult(TestRequest request, TestResult result) {
        LOGGER.log("Test %s:%s completed.", request.getTestClass(),
                request.getFrameworkMethod());
        ofNullable(result.getThrowable()).ifPresent(LOGGER::log);
    }
}
package org.languagetool.language; import com.google.common.cache.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.languagetool.*; import org.languagetool.chunking.Chunker; import org.languagetool.chunking.EnglishChunker; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import org.languagetool.rules.en.*; import org.languagetool.rules.neuralnetwork.NeuralNetworkRuleCreator; import org.languagetool.rules.neuralnetwork.Word2VecModel; import org.languagetool.rules.patterns.PatternRuleLoader; import org.languagetool.synthesis.Synthesizer; import org.languagetool.synthesis.en.EnglishSynthesizer; import org.languagetool.tagging.Tagger; import org.languagetool.tagging.disambiguation.Disambiguator; import org.languagetool.tagging.en.EnglishHybridDisambiguator; import org.languagetool.tagging.en.EnglishTagger; import org.languagetool.tokenizers.*; import org.languagetool.tokenizers.en.EnglishWordTokenizer; import java.io.*; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * Support for English - use the sub classes {@link BritishEnglish}, {@link AmericanEnglish}, * etc. if you need spell checking. * Make sure to call {@link #close()} after using this (currently only relevant if you make * use of {@link EnglishConfusionProbabilityRule}). 
*/ public class English extends Language implements AutoCloseable { private static final LoadingCache<String, List<Rule>> cache = CacheBuilder.newBuilder() .expireAfterWrite(30, TimeUnit.MINUTES) .build(new CacheLoader<String, List<Rule>>() { @Override public List<Rule> load(@NotNull String path) throws IOException { List<Rule> rules = new ArrayList<>(); PatternRuleLoader loader = new PatternRuleLoader(); try (InputStream is = JLanguageTool.getDataBroker().getAsStream(path)) { rules.addAll(loader.getRules(is, path)); } return rules; } }); private static final Language AMERICAN_ENGLISH = new AmericanEnglish(); private LanguageModel languageModel; /** * @deprecated use {@link AmericanEnglish} or {@link BritishEnglish} etc. instead - * they have rules for spell checking, this class doesn't (deprecated since 3.2) */ @Deprecated public English() { } @Override public Language getDefaultLanguageVariant() { return AMERICAN_ENGLISH; } @Override public SentenceTokenizer createDefaultSentenceTokenizer() { return new SRXSentenceTokenizer(this); } @Override public String getName() { return "English"; } @Override public String getShortCode() { return "en"; } @Override public String[] getCountries() { return new String[]{}; } @NotNull @Override public Tagger createDefaultTagger() { return new EnglishTagger(); } @Nullable @Override public Chunker createDefaultChunker() { return new EnglishChunker(); } @Nullable @Override public Synthesizer createDefaultSynthesizer() { return new EnglishSynthesizer(this); } @Override public Disambiguator createDefaultDisambiguator() { return new EnglishHybridDisambiguator(); } @Override public Tokenizer createDefaultWordTokenizer() { return new EnglishWordTokenizer(); } @Override public synchronized LanguageModel getLanguageModel(File indexDir) throws IOException { languageModel = initLanguageModel(indexDir, languageModel); return languageModel; } @Override public synchronized Word2VecModel getWord2VecModel(File indexDir) throws IOException { 
return new Word2VecModel(indexDir + File.separator + getShortCode()); } @Override public Contributor[] getMaintainers() { return new Contributor[] { new Contributor("Mike Unwalla"), Contributors.MARCIN_MILKOWSKI, Contributors.DANIEL_NABER }; } @Override public LanguageMaintainedState getMaintainedState() { return LanguageMaintainedState.ActivelyMaintained; } @Override public List<Rule> getRelevantRules(ResourceBundle messages, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { List<Rule> allRules = new ArrayList<>(); if (motherTongue != null) { if ("de".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-de.xml")); } else if ("fr".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-fr.xml")); } } allRules.addAll(Arrays.asList( new CommaWhitespaceRule(messages, Example.wrong("We had coffee<marker> ,</marker> cheese and crackers and grapes."), Example.fixed("We had coffee<marker>,</marker> cheese and crackers and grapes.")), new DoublePunctuationRule(messages), new UppercaseSentenceStartRule(messages, this, Example.wrong("This house is old. <marker>it</marker> was built in 1950."), Example.fixed("This house is old. 
<marker>It</marker> was built in 1950.")), new MultipleWhitespaceRule(messages, this), new SentenceWhitespaceRule(messages), new WhiteSpaceBeforeParagraphEnd(messages, this), new WhiteSpaceAtBeginOfParagraph(messages), new EmptyLineRule(messages, this), new LongSentenceRule(messages, userConfig, 33, true, true), new LongParagraphRule(messages, this, userConfig), new ParagraphRepeatBeginningRule(messages, this), new PunctuationMarkAtParagraphEnd(messages, this), new PunctuationMarkAtParagraphEnd2(messages, this), // specific to English: new SpecificCaseRule(messages), new EnglishUnpairedBracketsRule(messages, this), new EnglishWordRepeatRule(messages, this), new AvsAnRule(messages), new EnglishWordRepeatBeginningRule(messages, this), new CompoundRule(messages), new ContractionSpellingRule(messages), new EnglishWrongWordInContextRule(messages), new EnglishDashRule(messages), new WordCoherencyRule(messages), new EnglishDiacriticsRule(messages), new EnglishPlainEnglishRule(messages), new EnglishRedundancyRule(messages), new SimpleReplaceRule(messages, this), new ReadabilityRule(messages, this, userConfig, false), new ReadabilityRule(messages, this, userConfig, true) )); return allRules; } @Override public List<Rule> getRelevantLanguageModelRules(ResourceBundle messages, LanguageModel languageModel, UserConfig userConfig) throws IOException { return Arrays.asList( new UpperCaseNgramRule(messages, languageModel, this, userConfig), new EnglishConfusionProbabilityRule(messages, languageModel, this), new EnglishNgramProbabilityRule(messages, languageModel, this) ); } @Override public List<Rule> getRelevantLanguageModelCapableRules(ResourceBundle messages, @Nullable LanguageModel languageModel, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { if (languageModel != null && motherTongue != null && "fr".equals(motherTongue.getShortCode())) { return Arrays.asList( new 
EnglishForFrenchFalseFriendRule(messages, languageModel, motherTongue, this) ); } if (languageModel != null && motherTongue != null && "de".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForGermansFalseFriendRule(messages, languageModel, motherTongue, this) ); } return Arrays.asList(); } @Override public List<Rule> getRelevantWord2VecModelRules(ResourceBundle messages, Word2VecModel word2vecModel) throws IOException { return NeuralNetworkRuleCreator.createRules(messages, this, word2vecModel); } @Override public boolean hasNGramFalseFriendRule(Language motherTongue) { return motherTongue != null && ("de".equals(motherTongue.getShortCode()) || "fr".equals(motherTongue.getShortCode())); } /** * Closes the language model, if any. * @since 2.7 */ @Override public void close() throws Exception { if (languageModel != null) { languageModel.close(); } } @Override protected int getPriorityForId(String id) { switch (id) { case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning") case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN case "MISSING_HYPHEN": return 5; case "TRANSLATION_RULE": return 5; // Premium case "WRONG_APOSTROPHE": return 5; case "DOS_AND_DONTS": return 3; case "EN_COMPOUNDS": return 2; case "ABBREVIATION_PUNCTUATION": return 2; case "FEDEX": return 2; // higher prio than many verb rules (e.g. 
MD_BASEFORM) case "COVID_19": return 1; case "QUIET_QUITE": return 1; // higher prio than A_QUITE_WHILE case "A_OK": return 1; // prefer over A_AN case "I_A": return 1; // higher prio than I_IF case "GOT_GO": return 1; // higher prio than MD_BASEFORM case "UPPERCASE_SENTENCE_START": return 1; // higher prio than AI_MISSING_THE_* case "THERE_FORE": return 1; // higher prio than FORE_FOR case "PRP_NO_VB": return 1; // higher prio than I_IF case "FOLLOW_UP": return 1; // higher prio than MANY_NN case "IT_SOMETHING": return 1; // higher prio than IF_YOU_ANY and IT_THE_PRP case "NO_KNOW": return 1; // higher prio than DOUBLE_NEGATIVE case "WILL_BASED_ON": return 1; // higher prio than MD_BASEFORM / PRP_PAST_PART case "DON_T_AREN_T": return 1; // higher prio than DID_BASEFORM case "WILL_BECOMING": return 1; // higher prio than MD_BASEFORM case "WOULD_NEVER_VBN": return 1; // higher prio than MD_BASEFORM case "MD_APPRECIATED": return 1; // higher prio than MD_BASEFORM case "MONEY_BACK_HYPHEN": return 1; // higher prio than A_UNCOUNTABLE case "WORLDS_BEST": return 1; // higher prio than THE_SUPERLATIVE case "STEP_COMPOUNDS": return 1; // higher prio than STARS_AND_STEPS case "WON_T_TO": return 1; // higher prio than DON_T_AREN_T case "WAN_T": return 1; // higher prio than DON_T_AREN_T case "THE_US": return 1; // higher prio than DT_PRP case "THE_IT": return 1; // higher prio than DT_PRP case "A_NUMBER_NNS": return 1; // higher prio than A_NNS case "A_HUNDREDS": return 1; // higher prio than A_NNS case "NOW_A_DAYS": return 1; // higher prio than A_NNS case "COUPLE_OF_TIMES": return 1; // higher prio than A_NNS case "A_WINDOWS": return 1; // higher prio than A_NNS case "A_SCISSOR": return 1; // higher prio than A_NNS case "A_SNICKERS": return 1; // higher prio than A_NNS case "ROUND_A_BOUT": return 1; // higher prio than A_NNS case "SEEM_SEEN": return 1; // higher prio than HAVE_PART_AGREEMENT, PRP_HAVE_VB, MD_BASEFORM and PRP_PAST_PART case "BORN_IN": return 1; // higher 
prio than PRP_PAST_PART case "DO_TO": return 1; // higher prio than HAVE_PART_AGREEMENT case "IN_THIS_REGARDS": return 1; // higher prio than THIS_NNS case "NO_WHERE": return 1; // higher prio than NOW case "APOSTROPHE_VS_QUOTE": return 1; // higher prio than EN_QUOTES case "COMMA_PERIOD": return 1; // higher prio than COMMA_PARENTHESIS_WHITESPACE case "HERE_HEAR": return 1; // higher prio than ENGLISH_WORD_REPEAT_RULE case "LIGATURES": return 1; // prefer over spell checker case "APPSTORE": return 1; // prefer over spell checker case "INCORRECT_CONTRACTIONS": return 1; // prefer over EN_CONTRACTION_SPELLING case "DONT_T": return 1; // prefer over EN_CONTRACTION_SPELLING case "WHATS_APP": return 1; // prefer over EN_CONTRACTION_SPELLING case "NON_STANDARD_COMMA": return 1; // prefer over spell checker case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker case "WONT_CONTRACTION": return 1; // prefer over WONT_WANT case "YOU_GOOD": return 1; // prefer over PRP_PAST_PART case "THAN_THANK": return 1; // prefer over THAN_THEN case "CD_NN_APOSTROPHE_S": return 1; // prefer over CD_NN and LOWERCASE_NAME_APOSTROPHE_S case "IT_IF": return 1; // needs higher prio than PRP_COMMA and IF_YOU_ANY case "FINE_TUNE_COMPOUNDS": return 1; // prefer over less specific rules case "WHAT_IS_YOU": return 1; // prefer over HOW_DO_I_VB case "SUPPOSE_TO": return 1; // prefer over HOW_DO_I_VB case "PROFANITY": return 5; // prefer over spell checker case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. 
"for fuck sake") case "RUDE_SARCASTIC": return 6; // prefer over spell checker case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION) case "BLACK_SEA": return -1; // less priority than SEA_COMPOUNDS case "MANY_NN": return -1; // less priority than PUSH_UP_HYPHEN, SOME_FACULTY case "WE_BE": return -1; case "A_LOT_OF_NN": return -1; case "IT_VBZ": return -1; case "IT_IS_2": return -1; // needs higher prio than BEEN_PART_AGREEMENT case "A_RB_NN": return -1; // prefer other more specific rules (e.g. QUIET_QUITE, A_QUITE_WHILE) case "PLURAL_VERB_AFTER_THIS": return -1; // prefer other more specific rules (e.g. COMMA_TAG_QUESTION) case "BE_RB_BE": return -1; // prefer other more specific rules case "IT_ITS": return -1; // prefer other more specific rules case "ENGLISH_WORD_REPEAT_RULE": return -1; // prefer other more specific rules (e.g. IT_IT) case "PRP_MD_NN": return -1; // prefer other more specific rules (e.g. MD_ABLE, WONT_WANT) case "NON_ANTI_PRE_JJ": return -1; // prefer other more specific rules case "DT_JJ_NO_NOUN": return -1; // prefer other more specific rules (e.g. THIRD_PARTY) case "AGREEMENT_SENT_START": return -1; // prefer other more specific rules case "HAVE_PART_AGREEMENT": return -1; // prefer other more specific rules case "PREPOSITION_VERB": return -1; // prefer other more specific rules case "EN_A_VS_AN": return -1; // prefer other more specific rules (with suggestions, e.g. AN_ALSO) case "CD_NN": return -1; // prefer other more specific rules (with suggestions) case "ATD_VERBS_TO_COLLOCATION": return -1; // prefer other more specific rules (with suggestions) case "ADVERB_OR_HYPHENATED_ADJECTIVE": return -1; // prefer other more specific rules (with suggestions) case "GOING_TO_VBD": return -1; // prefer other more specific rules (with suggestions, e.g. 
GOING_TO_JJ) case "MISSING_PREPOSITION": return -1; // prefer other more specific rules (with suggestions) case "BE_TO_VBG": return -1; // prefer other more specific rules (with suggestions) case "NON3PRS_VERB": return -1; // prefer other more specific rules (with suggestions, e.g. DONS_T) case "DID_FOUND_AMBIGUOUS": return -1; // prefer other more specific rules (e.g. TWO_CONNECTED_MODAL_VERBS) case "BE_I_BE_GERUND": return -1; // prefer other more specific rules (with suggestions) case "VBZ_VBD": return -1; // prefer other more specific rules (e.g. IS_WAS) case "SUPERLATIVE_THAN": return -1; // prefer other more specific rules case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules case "METRIC_UNITS_EN_IMPERIAL": return -1; // prefer MILE_HYPHEN case "METRIC_UNITS_EN_GB": return -1; // prefer MILE_HYPHEN case "PRP_RB_NO_VB": return -2; // prefer other more specific rules (with suggestions) case "PRP_VBG": return -2; // prefer other more specific rules (with suggestions, prefer over HE_VERB_AGR) case "PRP_VBZ": return -2; // prefer other more specific rules (with suggestions) case "PRP_VB": return -2; // prefer other more specific rules (with suggestions) case "BEEN_PART_AGREEMENT": return -3; // prefer other more specific rules (e.g. VARY_VERY, VB_NN) case "A_INFINITIVE": return -3; // prefer other more specific rules (with suggestions, e.g. PREPOSITION_VERB) case "HE_VERB_AGR": return -3; // prefer other more specific rules (e.g. PRP_VBG) case "PRP_JJ": return -3; // prefer other rules (e.g. PRP_VBG, IT_IT and ADJECTIVE_ADVERB, PRP_ABLE, PRP_NEW, MD_IT_JJ) case "PRONOUN_NOUN": return -3; // prefer other rules (e.g. PRP_VB, PRP_JJ) case "INDIAN_ENGLISH": return -3; // prefer grammar rules, but higher prio than spell checker case "PRP_THE": return -4; // prefer other rules (e.g. I_A, PRP_JJ, IF_YOU_ANY, I_AN) case "MORFOLOGIK_RULE_EN_US": return -10; // more specific rules (e.g. 
L2 rules) have priority case "MORFOLOGIK_RULE_EN_GB": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_CA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_ZA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority case "TWO_CONNECTED_MODAL_VERBS": return -15; case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections. case "SENTENCE_FRAGMENT_SINGLE_WORDS": return -51; // prefer other more important sentence start corrections. case "EN_REDUNDANCY_REPLACE": return -510; // style rules should always have the lowest priority. case "EN_PLAIN_ENGLISH_REPLACE": return -511; // style rules should always have the lowest priority. case "THREE_NN": return -600; // style rules should always have the lowest priority. case "SENT_START_NUM": return -600; // style rules should always have the lowest priority. case "PASSIVE_VOICE": return -600; // style rules should always have the lowest priority. case "EG_NO_COMMA": return -600; // style rules should always have the lowest priority. case "IE_NO_COMMA": return -600; // style rules should always have the lowest priority. case "REASON_WHY": return -600; // style rules should always have the lowest priority. 
      // Tail of getPriorityForId(String): style rules and length rules get the very
      // lowest priorities so every grammar/spelling rule wins over them.
      case LongSentenceRule.RULE_ID: return -997;
      case LongParagraphRule.RULE_ID: return -998;
    }
    // All neural-network confusion-pair rules share one low priority so that more
    // specific pattern rules (with concrete suggestions) take precedence.
    if (id.startsWith("CONFUSION_RULE_")) {
      return -20;
    }
    return super.getPriorityForId(id);
  }

  /**
   * Returns a wrapper function that may replace a rule with a remote-enhanced variant.
   * English Morfologik (dictionary-based) spelling rules are wrapped in
   * {@link BERTSuggestionRanking} — but only when A/B tests are enabled and a remote
   * BERT configuration is present; every other rule falls through to the superclass
   * behavior unchanged.
   */
  @Override
  public Function<Rule, Rule> getRemoteEnhancedRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException {
    Function<Rule, Rule> fallback = super.getRemoteEnhancedRules(messageBundle, configs, userConfig, motherTongue, altLanguages, inputLogging);
    RemoteRuleConfig bert = RemoteRuleConfig.getRelevantConfig(BERTSuggestionRanking.RULE_ID, configs);
    return original -> {
      // Only English dictionary-based spell-check rules are re-ranked by BERT.
      if (original.isDictionaryBasedSpellingRule() && original.getId().startsWith("MORFOLOGIK_RULE_EN")) {
        if (UserConfig.hasABTestsEnabled() && bert != null) {
          return new BERTSuggestionRanking(original, bert, userConfig, inputLogging);
        }
      }
      return fallback.apply(original);
    };
  }

  /**
   * Builds the list of remote (server-backed) rules for English: the superclass rules
   * plus, for each remote rule that has a matching {@link RemoteRuleConfig}, an
   * AI article-insertion/deletion rule, a missing-"the" rule, a missing-a/an rule,
   * GPT-2 confusion-pair rules in three sizes, and an if/of/off variants model.
   * Rules whose config is absent are simply not added.
   */
  @Override
  public List<Rule> getRelevantRemoteRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException {
    List<Rule> rules = new ArrayList<>(super.getRelevantRemoteRules(
      messageBundle, configs, globalConfig, userConfig, motherTongue, altLanguages, inputLogging));
    String theInsertionID = "AI_THE_INS_RULE";
    RemoteRuleConfig theInsertionConfig = RemoteRuleConfig.getRelevantConfig(theInsertionID, configs);
    final String missingTheDescription = "This rule identifies whether the article 'the' is missing in a sentence.";
    final String missingWordDescription = "This rule identifies whether the articles 'a' or 'an' are missing in a sentence.";
    final String variantsDescription = "Identifies confusion between if, of, off and a misspelling";
    final String delMessage = "This article might not be necessary here.";
    final String insMessage = "You might be missing an article here.";
    // 'the' insertion/deletion: server labels THE_INS (superfluous) and INS_THE (missing).
    if (theInsertionConfig != null) {
      Map<String, String> theInsertionMessages = new HashMap<>();
      theInsertionMessages.put("THE_INS", delMessage);
      theInsertionMessages.put("INS_THE", insMessage);
      Rule theInsertionRule = GRPCRule.create(theInsertionConfig, inputLogging, theInsertionID, missingTheDescription, theInsertionMessages);
      rules.add(theInsertionRule);
    }
    // Missing-'the' detection only (single server label).
    String missingTheID = "AI_MISSING_THE";
    RemoteRuleConfig missingTheConfig = RemoteRuleConfig.getRelevantConfig(missingTheID, configs);
    if (missingTheConfig != null) {
      Map<String, String> missingTheMessages = new HashMap<>();
      missingTheMessages.put("MISSING_THE", insMessage);
      Rule missingTheRule = GRPCRule.create(missingTheConfig, inputLogging, missingTheID, missingTheDescription, missingTheMessages);
      rules.add(missingTheRule);
    }
    // Missing 'a'/'an': the match messages come from the server, hence the empty map.
    String missingWordID = "AI_MISSING_WORD";
    RemoteRuleConfig missingWordConfig = RemoteRuleConfig.getRelevantConfig(missingWordID, configs);
    if (missingWordConfig != null) {
      Rule missingWordRule = GRPCRule.create(missingWordConfig, inputLogging, missingWordID, missingWordDescription, Collections.emptyMap());// provided by server
      rules.add(missingWordRule);
    }
    // GPT-2 confusion-pair models in three sizes; each is added only if configured.
    List<String> confpairRules = Arrays.asList("AI_CONFPAIRS_EN_GPT2", "AI_CONFPAIRS_EN_GPT2_L", "AI_CONFPAIRS_EN_GPT2_XL");
    for (String confpairID : confpairRules) {
      RemoteRuleConfig confpairConfig = RemoteRuleConfig.getRelevantConfig(confpairID, configs);
      if (confpairConfig != null) {
        Rule confpairRule = new GRPCConfusionRule(messageBundle, confpairConfig, inputLogging);
        rules.add(confpairRule);
      }
    }
    // if/of/off variants model; messages also provided by the server.
    String variantsID = "EN_VARIANTS_MODEL";
    RemoteRuleConfig variantsConfig = RemoteRuleConfig.getRelevantConfig(variantsID, configs);
    if (variantsConfig != null) {
      Rule variantsRule = GRPCRule.create(variantsConfig, inputLogging, variantsID, variantsDescription, Collections.emptyMap());
      rules.add(variantsRule);
    }
    return rules;
  }
}
package com.prolificinteractive.materialcalendarview; import android.content.Context; import android.support.v4.view.BetterViewPager; import android.view.MotionEvent; /** * Custom ViewPager that allows swiping to be disabled. */ class CalendarPager extends BetterViewPager { private boolean pagingEnabled = true; public CalendarPager(Context context) { super(context); } /** * enable disable viewpager scroll * * @param pagingEnabled false to disable paging, true for paging (default) */ public void setPagingEnabled(boolean pagingEnabled) { this.pagingEnabled = pagingEnabled; } /** * @return is this viewpager allowed to page */ public boolean isPagingEnabled() { return pagingEnabled; } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { return pagingEnabled && super.onInterceptTouchEvent(ev); } @Override public boolean onTouchEvent(MotionEvent ev) { return pagingEnabled && super.onTouchEvent(ev); } @Override public boolean canScrollVertically(int direction) { /** * disables scrolling vertically when paging disabled, fixes scrolling * for nested {@link android.support.v4.view.ViewPager} */ return pagingEnabled && super.canScrollVertically(direction); } @Override public boolean canScrollHorizontally(int direction) { /** * disables scrolling horizontally when paging disabled, fixes scrolling * for nested {@link android.support.v4.view.ViewPager} */ return pagingEnabled && super.canScrollHorizontally(direction); } }
/**
 * This file is automatically generated by wheat-build.
 * Do not modify this file -- YOUR CHANGES WILL BE ERASED!
 */
package x7c1.linen.res.layout;

import android.content.Context;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import android.view.View;
import android.widget.TextView;
import android.widget.ImageButton;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SwitchCompat;
import x7c1.wheat.ancient.resource.ViewHolderProvider;
import x7c1.wheat.ancient.resource.ViewHolderProviderFactory;
import x7c1.linen.R;
import x7c1.linen.glue.res.layout.SettingScheduleRowItem;

// Generated provider: inflates R.layout.setting_schedule_row__item and binds its
// sub-views into a SettingScheduleRowItem view holder.
public class SettingScheduleRowItemProvider implements ViewHolderProvider<SettingScheduleRowItem> {

    private final LayoutInflater inflater;

    // Convenience constructor: derives the inflater from the given context.
    public SettingScheduleRowItemProvider(Context context){
        this.inflater = LayoutInflater.from(context);
    }
    public SettingScheduleRowItemProvider(LayoutInflater inflater){
        this.inflater = inflater;
    }
    @Override
    public int layoutId(){
        return R.layout.setting_schedule_row__item;
    }
    // Inflates under 'parent' for layout-param resolution without attaching to it.
    @Override
    public SettingScheduleRowItem inflateOn(ViewGroup parent){
        return inflate(parent, false);
    }
    @Override
    public SettingScheduleRowItem inflate(ViewGroup parent, boolean attachToRoot){
        View view = inflater.inflate(R.layout.setting_schedule_row__item, parent, attachToRoot);
        return factory().createViewHolder(view);
    }
    // Inflates with no parent; layout params from the XML root are ignored by Android.
    @Override
    public SettingScheduleRowItem inflate(){
        return inflate(null, false);
    }
    // Factory used both by this provider and by callers that construct providers lazily.
    public static ViewHolderProviderFactory<SettingScheduleRowItem> factory(){
        return new ViewHolderProviderFactory<SettingScheduleRowItem>() {
            @Override
            public ViewHolderProvider<SettingScheduleRowItem> create(LayoutInflater inflater){
                return new SettingScheduleRowItemProvider(inflater);
            }
            @Override
            public ViewHolderProvider<SettingScheduleRowItem> create(Context context){
                return new SettingScheduleRowItemProvider(context);
            }
            // Looks up each child view by id; ids must exist in the inflated layout.
            @Override
            public SettingScheduleRowItem createViewHolder(View view){
                return new SettingScheduleRowItem(
                    view,
                    (TextView) view.findViewById(R.id.setting_schedule_row__item__name),
                    (ImageButton) view.findViewById(R.id.setting_schedule_row__item__menu),
                    (android.support.v7.widget.RecyclerView) view.findViewById(R.id.setting_schedule_row__item__time_ranges),
                    (android.support.v7.widget.SwitchCompat) view.findViewById(R.id.setting_schedule_row__item__enabled),
                    (TextView) view.findViewById(R.id.setting_schedule_row__item__history),
                    (TextView) view.findViewById(R.id.setting_schedule_row__item__edit_time)
                );
            }
        };
    }
}
package org.jboss.forge.furnace.maven.plugin;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.settings.Settings;
import org.jboss.forge.furnace.Furnace;
import org.jboss.forge.furnace.addons.AddonId;
import org.jboss.forge.furnace.impl.FurnaceImpl;
import org.jboss.forge.furnace.manager.AddonManager;
import org.jboss.forge.furnace.manager.impl.AddonManagerImpl;
import org.jboss.forge.furnace.manager.maven.addon.MavenAddonDependencyResolver;
import org.jboss.forge.furnace.manager.request.InstallRequest;
import org.jboss.forge.furnace.repositories.AddonRepository;
import org.jboss.forge.furnace.repositories.AddonRepositoryMode;

/**
 * Goal which installs addons to a specified directory.
 *
 * @author <a href="ggastald@redhat.com">George Gastaldi</a>
 */
@Mojo(name = "addon-install", defaultPhase = LifecyclePhase.PREPARE_PACKAGE, threadSafe = true, requiresProject = false)
public class AddonInstallMojo extends AbstractMojo
{
   /**
    * Addon repository file location
    */
   @Parameter(property = "furnace.repository", required = true)
   private File addonRepository;

   /**
    * Addon IDs to install
    */
   @Parameter(property = "furnace.addonIds", required = true)
   private String[] addonIds;

   /**
    * Classifier used for addon resolution (default is forge-addon)
    */
   @Parameter(defaultValue = "forge-addon")
   private String classifier;

   /**
    * The current settings
    */
   @Parameter(defaultValue = "${settings}", required = true, readonly = true)
   private Settings settings;

   /**
    * Skip Addon API version resolution? Default is false
    */
   @Parameter(property = "furnace.addon.api.resolution.skip")
   private boolean skipAddonAPIVersionResolution;

   /**
    * Overwrite the addon repository if it already exists? Default is true
    */
   @Parameter(property = "furnace.addon.overwrite", defaultValue = "true")
   private boolean overwrite = true;

   /**
    * Skip this execution ?
    */
   @Parameter(property = "furnace.addon.skip")
   private boolean skip;

   /**
    * Prepares the addon repository directory (recreating it when overwrite is enabled),
    * then resolves and installs each configured addon into it.
    *
    * @throws MojoExecutionException if the repository directory cannot be created or deleted
    */
   @Override
   public void execute() throws MojoExecutionException, MojoFailureException
   {
      if (skip)
      {
         getLog().info("Execution skipped.");
         return;
      }
      Furnace forge = new FurnaceImpl();
      if (!addonRepository.exists())
      {
         createRepositoryDirectory();
      }
      else if (overwrite)
      {
         try
         {
            deleteDirectory(addonRepository);
         }
         catch (IOException e)
         {
            throw new MojoExecutionException("Could not delete " + addonRepository, e);
         }
         createRepositoryDirectory();
      }
      AddonRepository repository = forge.addRepository(AddonRepositoryMode.MUTABLE, addonRepository);
      MavenAddonDependencyResolver addonResolver = new MavenAddonDependencyResolver(this.classifier);
      addonResolver.setSettings(settings);
      addonResolver.setResolveAddonAPIVersions(!skipAddonAPIVersionResolution);
      AddonManager addonManager = new AddonManagerImpl(forge, addonResolver);
      for (String addonId : addonIds)
      {
         AddonId id = AddonId.fromCoordinates(addonId);
         InstallRequest install = addonManager.install(id, repository);
         // An empty action list means the addon is already installed; skip the request.
         if (!install.getActions().isEmpty())
         {
            getLog().info("" + install);
            install.perform();
         }
      }
   }

   /**
    * Creates the addon repository directory, failing loudly instead of letting a
    * silently-ignored {@link File#mkdirs()} failure surface later as a confusing error.
    */
   private void createRepositoryDirectory() throws MojoExecutionException
   {
      if (!addonRepository.mkdirs() && !addonRepository.isDirectory())
      {
         throw new MojoExecutionException("Could not create directory " + addonRepository);
      }
   }

   /**
    * Recursively deletes the given directory, including the directory itself.
    */
   private void deleteDirectory(File addonRepository) throws IOException
   {
      Files.walkFileTree(addonRepository.toPath(), new SimpleFileVisitor<Path>()
      {
         @Override
         public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException
         {
            Files.delete(file);
            return FileVisitResult.CONTINUE;
         }

         @Override
         public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException
         {
            // Files are gone by now; delete the (empty) directory on the way out.
            Files.delete(dir);
            return FileVisitResult.CONTINUE;
         }
      });
   }
}
package jpaoletti.jpm.core.operations;

import java.util.Set;
import jpaoletti.jpm.core.PMContext;
import jpaoletti.jpm.core.PMException;

/**
 * Toggles an entity's membership in the container's selected-index set.
 *
 * @author jpaoletti
 */
public class SelectItemOperation extends OperationCommandSupport {

    public SelectItemOperation(String operationId) {
        super(operationId);
    }

    public SelectItemOperation() {
        super("selectitem");
    }

    /**
     * Reads the "idx" request parameter and flips that index in the selection:
     * selected indexes become unselected and vice versa. A missing parameter is
     * a no-op.
     */
    @Override
    protected void doExecute(PMContext ctx) throws PMException {
        super.doExecute(ctx);
        final String rawIndex = (String) ctx.getParameter("idx");
        if (rawIndex == null) {
            return;
        }
        final Integer index = Integer.parseInt(rawIndex);
        final Set<Integer> selected = ctx.getEntityContainer().getSelectedIndexes();
        // remove() reports whether the index was present; if not, toggle it on.
        if (!selected.remove(index)) {
            selected.add(index);
        }
    }
}
package be.ibridge.kettle.core.database; import be.ibridge.kettle.core.Const; import be.ibridge.kettle.core.value.Value; /** * Contains Oracle specific information through static final members * * @author Matt * @since 11-mrt-2005 */ public class OracleDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface { /** * Construct a new database connections. Note that not all these parameters are not allways mandatory. * * @param name The database name * @param access The type of database access * @param host The hostname or IP address * @param db The database name * @param port The port on which the database listens. * @param user The username * @param pass The password */ public OracleDatabaseMeta(String name, String access, String host, String db, String port, String user, String pass) { super(name, access, host, db, port, user, pass); } public OracleDatabaseMeta() { } public String getDatabaseTypeDesc() { return "ORACLE"; } public String getDatabaseTypeDescLong() { return "Oracle"; } /** * @return Returns the databaseType. */ public int getDatabaseType() { return DatabaseMeta.TYPE_DATABASE_ORACLE; } public int[] getAccessTypeList() { return new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_ODBC, DatabaseMeta.TYPE_ACCESS_OCI}; } public int getDefaultDatabasePort() { if (getAccessType()==DatabaseMeta.TYPE_ACCESS_NATIVE) return 1521; return -1; } /** * @return Whether or not the database can use auto increment type of fields (pk) */ public boolean supportsAutoInc() { return false; } /** * @see be.ibridge.kettle.core.database.DatabaseInterface#getLimitClause(int) */ public String getLimitClause(int nrRows) { return " WHERE ROWNUM <= "+nrRows; } /** * Returns the minimal SQL to launch in order to determine the layout of the resultset for a given database table * @param tableName The name of the table to determine the layout for * @return The SQL to launch. 
*/ public String getSQLQueryFields(String tableName) { return "SELECT /*+FIRST_ROWS*/ * FROM "+tableName+" WHERE ROWNUM < 1"; } public String getDriverClass() { if (getAccessType()==DatabaseMeta.TYPE_ACCESS_ODBC) { return "sun.jdbc.odbc.JdbcOdbcDriver"; } else { return "oracle.jdbc.driver.OracleDriver"; } } public String getURL() { if (getAccessType()==DatabaseMeta.TYPE_ACCESS_ODBC) { return "jdbc:odbc:"+getDatabaseName(); } else if (getAccessType()==DatabaseMeta.TYPE_ACCESS_NATIVE) { return "jdbc:oracle:thin:@"+getHostname()+":"+getDatabasePortNumberString()+":"+getDatabaseName(); } else // OCI { if (getDatabaseName()!=null && getDatabaseName().length()>0) { return "jdbc:oracle:oci:@(description=(address=(host="+getHostname()+")(protocol=tcp)(port="+getDatabasePortNumberString()+"))(connect_data=(sid="+getDatabaseName()+")))"; } else { return "jdbc:oracle:oci:@"+getDatabaseName(); } } } /** * Oracle doesn't support options in the URL, we need to put these in a Properties object at connection time... */ public boolean supportsOptionsInURL() { return false; } /** * @return true if the database supports sequences */ public boolean supportsSequences() { return true; } /** * Check if a sequence exists. * @param sequenceName The sequence to check * @return The SQL to get the name of the sequence back from the databases data dictionary */ public String getSQLSequenceExists(String sequenceName) { return "SELECT * FROM USER_SEQUENCES WHERE SEQUENCE_NAME = '"+sequenceName.toUpperCase()+"'"; } /** * Get the current value of a database sequence * @param sequenceName The sequence to check * @return The current value of a database sequence */ public String getSQLCurrentSequenceValue(String sequenceName) { return "SELECT "+sequenceName+".currval FROM DUAL"; } /** * Get the SQL to get the next value of a sequence. (Oracle only) * @param sequenceName The sequence name * @return the SQL to get the next value of a sequence. 
(Oracle only) */ public String getSQLNextSequenceValue(String sequenceName) { return "SELECT "+sequenceName+".nextval FROM dual"; } /** * @return true if we need to supply the schema-name to getTables in order to get a correct list of items. */ public boolean useSchemaNameForTableList() { return true; } /** * @return true if the database supports synonyms */ public boolean supportsSynonyms() { return true; } /** * Generates the SQL statement to add a column to the specified table * @param tablename The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param use_autoinc whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. * @return the SQL statement to add a column to the specified table */ public String getAddColumnStatement(String tablename, Value v, String tk, boolean use_autoinc, String pk, boolean semicolon) { return "ALTER TABLE "+tablename+" ADD ( "+getFieldDefinition(v, tk, pk, use_autoinc, true, false)+" ) "; } /** * Generates the SQL statement to drop a column from the specified table * @param tablename The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param use_autoinc whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. 
* @return the SQL statement to drop a column from the specified table */ public String getDropColumnStatement(String tablename, Value v, String tk, boolean use_autoinc, String pk, boolean semicolon) { return "ALTER TABLE "+tablename+" DROP ( "+v.getName()+" ) "+Const.CR; } /** * Generates the SQL statement to modify a column in the specified table * @param tablename The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param use_autoinc whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. * @return the SQL statement to modify a column in the specified table */ public String getModifyColumnStatement(String tablename, Value v, String tk, boolean use_autoinc, String pk, boolean semicolon) { return "ALTER TABLE "+tablename+" MODIFY ("+getFieldDefinition(v, tk, pk, use_autoinc, true, false)+" )"; } public String getFieldDefinition(Value v, String tk, String pk, boolean use_autoinc, boolean add_fieldname, boolean add_cr) { StringBuffer retval=new StringBuffer(128); String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if (add_fieldname) retval.append(fieldname).append(' '); int type = v.getType(); switch(type) { case Value.VALUE_TYPE_DATE : retval.append("DATE"); break; case Value.VALUE_TYPE_BOOLEAN: retval.append("CHAR(1)"); break; case Value.VALUE_TYPE_NUMBER : case Value.VALUE_TYPE_INTEGER: case Value.VALUE_TYPE_BIGNUMBER: retval.append("NUMBER"); if (length>0) { retval.append('(').append(length); if (precision>0) { retval.append(", ").append(precision); } retval.append(')'); } break; case Value.VALUE_TYPE_STRING: if (length>=DatabaseMeta.CLOB_LENGTH) { retval.append("CLOB"); } else { if (length>0 && length<=2000) { retval.append("VARCHAR2(").append(length).append(')'); } else { if (length<=0) { retval.append("VARCHAR2(2000)"); // We don't know, so we just use 
the maximum... } else { retval.append("CLOB"); } } } break; case Value.VALUE_TYPE_BINARY: // the BLOB can contain binary data. { retval.append("BLOB"); } break; default: retval.append(" UNKNOWN"); break; } if (add_cr) retval.append(Const.CR); return retval.toString(); } /* (non-Javadoc) * @see com.ibridge.kettle.core.database.DatabaseInterface#getReservedWords() */ public String[] getReservedWords() { return new String[] { "ACCESS", "ADD", "ALL", "ALTER", "AND", "ANY", "ARRAYLEN", "AS", "ASC", "AUDIT", "BETWEEN", "BY", "CHAR", "CHECK", "CLUSTER", "COLUMN", "COMMENT", "COMPRESS", "CONNECT", "CREATE", "CURRENT", "DATE", "DECIMAL", "DEFAULT", "DELETE", "DESC", "DISTINCT", "DROP", "ELSE", "EXCLUSIVE", "EXISTS", "FILE", "FLOAT", "FOR", "FROM", "GRANT", "GROUP", "HAVING", "IDENTIFIED", "IMMEDIATE", "IN", "INCREMENT", "INDEX", "INITIAL", "INSERT", "INTEGER", "INTERSECT", "INTO", "IS", "LEVEL", "LIKE", "LOCK", "LONG", "MAXEXTENTS", "MINUS", "MODE", "MODIFY", "NOAUDIT", "NOCOMPRESS", "NOT", "NOTFOUND", "NOWAIT", "NULL", "NUMBER", "OF", "OFFLINE", "ON", "ONLINE", "OPTION", "OR", "ORDER", "PCTFREE", "PRIOR", "PRIVILEGES", "PUBLIC", "RAW", "RENAME", "RESOURCE", "REVOKE", "ROW", "ROWID", "ROWLABEL", "ROWNUM", "ROWS", "SELECT", "SESSION", "SET", "SHARE", "SIZE", "SMALLINT", "SQLBUF", "START", "SUCCESSFUL", "SYNONYM", "SYSDATE", "TABLE", "THEN", "TO", "TRIGGER", "UID", "UNION", "UNIQUE", "UPDATE", "USER", "VALIDATE", "VALUES", "VARCHAR", "VARCHAR2", "VIEW", "WHENEVER", "WHERE", "WITH" }; } /** * @return The SQL on this database to get a list of stored procedures. 
*/ public String getSQLListOfProcedures() { return "SELECT DISTINCT DECODE(package_name, NULL, '', package_name||'.')||object_name FROM user_arguments"; } public String getSQLLockTables(String tableNames[]) { StringBuffer sql=new StringBuffer(128); for (int i=0;i<tableNames.length;i++) { sql.append("LOCK TABLE ").append(tableNames[i]).append(" IN EXCLUSIVE MODE;").append(Const.CR); } return sql.toString(); } public String getSQLUnlockTables(String tableNames[]) { return null; // commit handles the unlocking! } /** * @return extra help text on the supported options on the selected database platform. */ public String getExtraOptionsHelpText() { return "Source of information: http: Const.CR+ "Key Value Comment"+Const.CR+ " "user String The value of this property is used as the user name when connecting to the database."+Const.CR+ "password String The value of this property is used as the password when connecting to the database."+Const.CR+ "database String The value of this property is used as the SID of the database."+Const.CR+ "server String The value of this property is used as the host name of the database."+Const.CR+ "internal_logon String The value of this property is used as the user name when performing an internal logon. Usually this will be SYS or SYSDBA."+Const.CR+ "defaultRowPrefetch int The value of this property is used as the default number of rows to prefetch."+Const.CR+ "defaultExecuteBatch int The value of this property is used as the default batch size when using Oracle style batching."+Const.CR+ "processEscapes boolean If the value of this property is 'false' then the default setting for Statement.setEscapeProccessing is false."+Const.CR+ "disableDefineColumnType boolean When this connection property has the value true, the method defineColumnType is has no effect. This is highly recommended when using the Thin driver, especially when the database character set contains four byte characters that expand to two UCS2 surrogate characters, e.g. 
AL32UTF8. The method defineColumnType provides no performance benefit (or any other benefit) when used with the 10.1.0 Thin driver. This property is provided so that you do not have to remove the calls from your code. This is especially valuable if you use the same code with Thin driver and either the OCI or Server Internal driver."+Const.CR+ "DMSName String Set the name of the DMS Noun that is the parent of all JDBC DMS metrics."+Const.CR+ "DMSType String Set the type of the DMS Noun that is the parent of all JDBC DMS metrics."+Const.CR+ "AccumulateBatchResult boolean When using Oracle style batching, JDBC determines when to flush a batch to the database. If this property is true, then the number of modified rows accumulated across all batches flushed from a single statement. The default is to count each batch separately."+Const.CR+ "oracle.jdbc.J2EE13Compliant boolean If the value of this property is 'true', JDBC uses strict compliance for some edge cases. " +Const.CR+ " In general Oracle's JDBC drivers will allow some operations that are not permitted in the strict interpretation of J2EE 1.3. " +Const.CR+ " Setting this property to true will cause those cases to throw SQLExceptions. " +Const.CR+ " There are some other edge cases where Oracle's JDBC drivers have slightly different behavior than defined in J2EE 1.3. " +Const.CR+ " This results from Oracle having defined the behavior prior to the J2EE 1.3 specification and the resultant need for compatibility with existing customer code. " +Const.CR+ " Setting this property will result in full J2EE 1.3 compliance at the cost of incompatibility with some customer code. " +Const.CR+ " Can be either a system property or a connection property. The default value of this property is 'false' in classes12.jar and ojdbc12.jar. " +Const.CR+ " The default value is 'true' in classes12dms.jar and ojdbc14dms.jar. 
" +Const.CR+ " It is true in the dms jars because they are used almost exclusively in Oracle Application Server and so J2EE compatibility is more important than compatibility with previous Oracle versions."+Const.CR+ "oracle.jdbc.TcpNoDelay boolean If the value of this property is 'true', the TCP_NODELAY property is set on the socket when using the Thin driver. See java.net.SocketOptions.TCP_NODELAY. Can be either a system property or a connection property."+Const.CR+ "defaultNChar boolean If the value of this property is 'true', the default mode for all character data columns will be NCHAR."+Const.CR+ "useFetchSizeWithLongColumn boolean If the value of this property is 'true', then JDBC will prefetch rows even though there is a LONG or LONG RAW column in the result. By default JDBC fetches only one row at a time if there are LONG or LONG RAW columns in the result. Setting this property to true can improve performance but can also cause SQLExceptions if the results are too big."+Const.CR+ "remarksReporting boolean If the value of this property is 'true', OracleDatabaseMetaData will include remarks in the metadata. This can result in a substantial reduction in performance."+Const.CR+ "includeSynonyms boolean If the value of this property is 'true', JDBC will include synonyms when getting information about a column."+Const.CR+ "restrictGetTables boolean If the value of this property is 'true', JDBC will return a more refined value for DatabaseMetaData.getTables. By default JDBC will return things that are not accessible tables. These can be non-table objects or accessible synonymns for inaccessible tables. If this property is true JDBC will return only accessible tables. This has a substantial performance penalty."+Const.CR+ "fixedString boolean If the value of this property is 'true', JDBC will use FIXED CHAR semantic when setObject is called with a String argument. By default JDBC uses VARCHAR semantics. The difference is in blank padding. 
With the default there is no blank padding so, for example, 'a' does not equal 'a ' in a CHAR(4). If true these two will be equal."+Const.CR+ "oracle.jdbc.ocinativelibrary String Set the name of the native library for the oci driver. If not set, the default name, libocijdbcX (X is a version number), is used."+Const.CR+ "SetBigStringTryClob boolean Setting this property to 'true' forces PreparedStatement.setString() method to use setStringForClob() if the data is larger than 32765 bytes. Please note that using this method with VARCHAR and LONG columns may cause large data to be truncated silently, or cause other errors differing from the normal behavior of setString()."+Const.CR ; } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package ch.unizh.ini.jaer.projects.gesture.hmm; import ch.unizh.ini.jaer.projects.gesture.virtualdrummer.BlurringFilter2DTracker; import java.awt.*; import java.awt.event.*; import java.awt.geom.Point2D; import java.io.*; import java.util.*; import java.util.List; import java.util.logging.Level; import javax.media.opengl.GLAutoDrawable; import javax.swing.*; import javax.swing.Timer; import net.sf.jaer.chip.AEChip; import net.sf.jaer.event.EventPacket; import net.sf.jaer.eventprocessing.*; import net.sf.jaer.eventprocessing.tracking.ClusterPathPoint; import net.sf.jaer.graphics.FrameAnnotater; import net.sf.jaer.util.filter.LowpassFilter2d; /** * Gesture recognition system using a single DVS sensor. * BluringFilter2DTracker is used to obtain the trajectory of moving object (eg, hand) * HMM is used for classification. But, HMM is not used for spoting gestures (i.e., finding the start and end timing of gestures) * Gesture spotting is done by the tracker by assuming that there is slow movement between gestures. * * @author Jun Haeng Lee */ public class GestureBF2D extends EventFilter2D implements FrameAnnotater,Observer{ /** * a cluster with points more than this amount will be checked for gesture recognition. */ private int numPointsThreshold = getPrefs().getInt("GestureBF2D.numPointsThreshold", 100); /** * retries HMM after this percents of head points is removed from the trajectory when the first tiral is failed. */ private int headTrimmingPercents = getPrefs().getInt("GestureBF2D.headTrimmingPercents", 30); /** * retries HMM after this percents of tail points is removed from the trajectory when the first tiral is failed. */ private int tailTrimmingPercents = getPrefs().getInt("GestureBF2D.tailTrimmingPercents", 10); /** * speed threshold of the cluster to be a gesture candidate (in kPPT). 
*/ private float maxSpeedThreshold_kPPT = getPrefs().getFloat("GestureBF2D.maxSpeedThreshold_kPPT", 0.1f); /** * enables lowpass filter to smooth the gesture trajectory */ private boolean enableLPF = getPrefs().getBoolean("GestureBF2D.enableLPF", true); /** * lowpass filter time constant for gesture trajectory in ms */ private float tauPathMs = getPrefs().getFloat("GestureBF2D.tauPathMs",5.0f); /** * refractory time in ms between gestures. */ private int refractoryTimeMs = getPrefs().getInt("GestureBF2D.refractoryTimeMs", 700); /** * true if the gesture recognition system is activated. */ private boolean login = false; /** * path of gesture picture files */ public static String pathGesturePictures = "C:/Users/jun/Documents/gesture pictures/"; /** * images for gestures */ private Image imgHi, imgBye, imgLeft, imgRight, imgUp, imgDown, imgCW, imgCCW, imgCheck, imgPush; /** * timmings in the current and previous gestures */ protected int startTimeGesture, endTimeGesture, endTimePrevGesture = 0; /** * 'Check' gesture is recognized by a check shape or a sequence of 'SlashDown' and 'SlashUp' * checkActivated is true if 'SlashDown' is detected. It's false otherwise. 
*/ private boolean checkActivated = false; /** * time duration limit between 'SlashDown' and 'SlashUp' to make a valid 'Check' gesture */ private static int checkActivationTimeUs = 400000; /** * previous path */ private ArrayList<ClusterPathPoint> prevPath; /** * moving object tracker */ protected BlurringFilter2DTracker tracker; /** * feature extractor */ FeatureExtraction fve = new FeatureExtraction(16, 16); /** * Hand drawing panel with gesture HMM module */ HmmDrawingPanel hmmDP; /** * low pass filter to smoothe the trajectory of gestures */ LowpassFilter2d lpf; /** * constructor * * @param chip */ @SuppressWarnings({"LeakingThisInConstructor", "OverridableMethodCallInConstructor"}) public GestureBF2D(AEChip chip) { super(chip); this.chip = chip; chip.addObserver(this); String trimming = "Trimming", selection = "Selection", lpfilter = "Low pass filter", gesture = "Gesture"; setPropertyTooltip(selection,"numPointsThreshold","a cluster with points more than this amount will be checked for gesture recognition."); setPropertyTooltip(selection,"maxSpeedThreshold_kPPT","speed threshold of the cluster to be a gesture candidate (in kPPT)."); setPropertyTooltip(trimming,"headTrimmingPercents","retries HMM after this percents of head points is removed from the trajectory when the first tiral is failed."); setPropertyTooltip(trimming,"tailTrimmingPercents","retries HMM after this percents of tail points is removed from the trajectory when the first tiral is failed."); setPropertyTooltip(lpfilter,"enableLPF","enables lowpass filter to smooth the gesture trajectory"); setPropertyTooltip(lpfilter,"tauPathMs","lowpass filter time constant for gesture trajectory in ms"); setPropertyTooltip(gesture,"refractoryTimeMs","refractory time in ms between gestures"); // low pass filter this.lpf = new LowpassFilter2d(); // hand drawing panel with gesture HMM String [] bNames = {"Add", "Remove", "Reset", "Show", "Learn", "Guess"}; hmmDP = new HmmDrawingPanel("HMM based gesture 
recognition test using hand drawing panel", bNames); hmmDP.setVisible(false); // load gesture images into the memory loadGestureImages(); // encloses tracker filterChainSetting (); } /** * sets the BlurringFilter2DTracker as a enclosed filter to find cluster */ protected void filterChainSetting (){ tracker = new BlurringFilter2DTracker(chip); ( (EventFilter2D)tracker ).addObserver(this); setEnclosedFilterChain(new FilterChain(chip)); getEnclosedFilterChain().add((EventFilter2D)tracker); ( (EventFilter2D)tracker ).setEnclosed(true,this); ( (EventFilter2D)tracker ).setFilterEnabled(isFilterEnabled()); } @Override public EventPacket<?> filterPacket(EventPacket<?> in) { out = tracker.filterPacket(in); return out; } @Override public void initFilter() { tracker.initFilter(); endTimePrevGesture = 0; lpf.setTauMs(tauPathMs); } @Override public void resetFilter() { tracker.resetFilter(); endTimePrevGesture = 0; lpf.setTauMs(tauPathMs); } @Override public synchronized void setFilterEnabled (boolean filterEventsEnabled){ super.setFilterEnabled(filterEventsEnabled); if ( hmmDP != null ){ if ( filterEventsEnabled ){ hmmDP.setVisible(true); } else{ hmmDP.setVisible(false); } } } @Override public void annotate(GLAutoDrawable drawable) { // do nothing } /** * load gesture Images */ protected final void loadGestureImages(){ Toolkit myToolkit = Toolkit.getDefaultToolkit(); imgHi = myToolkit.getImage(pathGesturePictures + "hi.jpg"); hmmDP.putImage(imgHi); imgBye = myToolkit.getImage(pathGesturePictures + "bye.jpg"); hmmDP.putImage(imgBye); imgLeft = myToolkit.getImage(pathGesturePictures + "left.jpg"); hmmDP.putImage(imgLeft); imgRight = myToolkit.getImage(pathGesturePictures + "right.jpg"); hmmDP.putImage(imgRight); imgUp = myToolkit.getImage(pathGesturePictures + "up.jpg"); hmmDP.putImage(imgUp); imgDown = myToolkit.getImage(pathGesturePictures + "Down.jpg"); hmmDP.putImage(imgDown); imgCW = myToolkit.getImage(pathGesturePictures + "clockwise.jpg"); hmmDP.putImage(imgCW); imgCCW = 
myToolkit.getImage(pathGesturePictures + "counterclockwise.jpg"); hmmDP.putImage(imgCCW); imgCheck = myToolkit.getImage(pathGesturePictures + "check.jpg"); hmmDP.putImage(imgCheck); // imgPush = myToolkit.getImage(pathGesturePictures + "push.jpg"); // hmmDP.putImage(imgPush); } @Override public void update(Observable o, Object arg) { if ( o instanceof BlurringFilter2DTracker ){ List<BlurringFilter2DTracker.Cluster> cl = tracker.getClusters(); ArrayList<ClusterPathPoint> path = selectClusterTrajectory(cl); if(path != null){ if(login){ // estimates the best matching gesture String bmg = estimateGesture(path); System.out.println("Best matching gesture is " + bmg); if(afterRecognitionProcess(bmg, path)){ endTimePrevGesture = endTimeGesture; } else { storePath(path); } } else { if(detectStartingGesture(path)){ System.out.println("Gesture recognition system is enabled."); afterRecognitionProcess("Infinite", path); } else { storePath(path); } } } } } public void storePath(ArrayList<ClusterPathPoint> path){ prevPath = new ArrayList<ClusterPathPoint>(); for(ClusterPathPoint pt:path){ ClusterPathPoint clonePt = new ClusterPathPoint(pt.x, pt.y, pt.t, pt.getNEvents()); clonePt.stereoDisparity = pt.stereoDisparity; if(clonePt.velocityPPT != null){ clonePt.velocityPPT.x = pt.velocityPPT.x; clonePt.velocityPPT.y = pt.velocityPPT.y; } prevPath.add(clonePt); // prevPath.add((ClusterPathPoint) pt.clone()); } } /** * detects the startinf gesture (ie. 'Infinite' shape) * It tries several times by trimming the input trajectory. 
* * @param path * @return */ protected boolean detectStartingGesture(ArrayList<ClusterPathPoint> path){ boolean ret = false; String bmg = estimateGesture(path); if(bmg != null){ if(bmg.startsWith("Infinite")){ ret = true; } else if (bmg.startsWith("CW") || bmg.startsWith("CCW")) { if(prevPath != null) ret = tryGestureWithPrevPath(path, 0, "Infinite", checkActivationTimeUs); } else { } } return ret; } public boolean tryGestureWithPrevPath(ArrayList<ClusterPathPoint> path, int prevPathTrimmingPercent, String gestureName, int timeDiffTolerenceUs){ boolean ret = false; if(prevPath != null){ if((startTimeGesture - ((ClusterPathPoint)prevPath.get(prevPath.size()-1)).t) > timeDiffTolerenceUs) return false; ArrayList<ClusterPathPoint> trimmedPath = trajectoryTrimming(prevPath, prevPathTrimmingPercent, 0, FeatureExtraction.calTrajectoryLength(prevPath)); trimmedPath.addAll(path); String bmg = getBestmatchingGesture(trimmedPath, 0); if(bmg != null && bmg.startsWith(gestureName)) ret = true; } return ret; } /** * estimates best matching gesture * It tries several times by trimming the input trajectory. 
* * @param path * @return */ protected String estimateGesture(ArrayList<ClusterPathPoint> path){ String bmg = getBestmatchingGesture(path, -200); if(bmg == null){ double pathLength = FeatureExtraction.calTrajectoryLength(path); for(int i = 1; i <= 2 ; i++){ for(int j = 0; j<=1; j++){ // retries with the head trimming if failed ArrayList<ClusterPathPoint> trimmedPath = trajectoryTrimming(path, i*headTrimmingPercents/2, j*tailTrimmingPercents, pathLength); if(trimmedPath.size() >= numPointsThreshold && checkSpeedCriterion(trimmedPath)){ bmg = getBestmatchingGesture(trimmedPath, -200 + ((i-1)*2+j+1)*100); } else { if(trimmedPath.size() < numPointsThreshold) // System.out.println("Lack of data points"); if(!checkSpeedCriterion(trimmedPath)) // System.out.println("Under speed limit"); break; } if(bmg != null) return bmg; } } } return bmg; } /** * returns the best matching gesture * * @param path * @return */ private String getBestmatchingGesture(ArrayList<ClusterPathPoint> path, int offset){ String[] codewards = fve.convTrajectoryToCodewords(path); String bmg = hmmDP.ghmm.getBestMatchingGesture(codewards, fve.vectorAngleSeq); /* // draws the quantized vectors if(offset == -200) hmmDP.clearImage(); hmmDP.drawTrajectory(FeatureExtraction.convAnglesToTrajectoryInScaledArea(new Point2D.Float(hmmDP.centerX+offset, hmmDP.centerY+offset), hmmDP.centerY/2, fve.vectorAngleSeq)); // draws the trajectory ArrayList<Point2D.Float> tmpPath = new ArrayList<Point2D.Float>(); for(ClusterPathPoint pt:path) tmpPath.add(new Point2D.Float(pt.x*2 + 200 + offset, pt.y*2)); hmmDP.drawTrajectoryDot(tmpPath); hmmDP.repaint(); System.out.println(offset + ": " + bmg); */ return bmg; } /** * puts an image on the screen based on the result of gesture recognition * * @param bmg * @return */ protected boolean afterRecognitionProcess(String bmg, ArrayList<ClusterPathPoint> path){ if(bmg == null) return false; boolean ret = true; if(login){ if(bmg.startsWith("Infinite")){ doLogout(); } else 
if(bmg.startsWith("Push")){ doPush(); } else if(bmg.startsWith("SlashUp")){ ret = doSlashUp(path); } else { if(bmg.startsWith("SlashDown")){ doSlashDown(); } else { // doesn't have consider refractory time for CW and CCW if(bmg.startsWith("CW")){ doCW(path); }else if(bmg.startsWith("CCW")){ doCCW(path); } // has to consider refractory time for Left, Right, Up, Down, and Check // doesn't have to consider refractory time if checkActivated is true (i.e. SlashDown is detected) becase SlashDown is a partial gesture if(checkActivated || startTimeGesture >= endTimePrevGesture + refractoryTimeMs*1000){ if(bmg.startsWith("Left")){ doLeft(); }else if(bmg.startsWith("Right")){ doRight(); }else if(bmg.startsWith("Up")){ doUp(); }else if(bmg.startsWith("Down")){ doDown(); }else if(bmg.startsWith("Check")){ doCheck(); } } else { endTimePrevGesture -= (refractoryTimeMs*1000); ret = false; } checkActivated = false; } } } else { if(bmg.startsWith("Infinite")){ doLogin(); } } prevPath = null; return ret; } /** * selects the best trajectory from clusters * * @param cl * @return */ protected ArrayList<ClusterPathPoint> selectClusterTrajectory(List<BlurringFilter2DTracker.Cluster> cl){ ArrayList<ClusterPathPoint> selectedTrj = null; BlurringFilter2DTracker.Cluster selectedCluster = null; int maxNumPoint = 0; // select a candidate trajectory for (BlurringFilter2DTracker.Cluster c: cl){ // doesn't have to check alive cluster if (!c.isDead()){ continue; } // checks number of points if(c.getPath().size() < numPointsThreshold){ continue; } else { // search the largest cluster ArrayList<ClusterPathPoint> path = c.getPath(); if(path.size() > maxNumPoint){ selectedTrj = path; maxNumPoint = path.size(); selectedCluster = c; } } } if(selectedTrj == null) return null; // gesture speed check if(!checkSpeedCriterion(selectedTrj)){ return null; } // low-pass filtering ArrayList<ClusterPathPoint> retTrj = null; if(enableLPF) retTrj = lowPassFiltering(selectedTrj); else retTrj = selectedTrj; // 
records start and end time of the selected trajectory if(retTrj != null){ startTimeGesture = selectedCluster.getBirthTime(); endTimeGesture = selectedCluster.getLastEventTimestamp(); } return retTrj; } /** * checks speed criterion. * returns true if a certain number of points have velocity higher than maxSpeedThreshold_kPPT * * @param path * @return */ private boolean checkSpeedCriterion(ArrayList<ClusterPathPoint> path){ boolean ret = true; // gesture speed check, At least 5% of the points velocity have to exceed speed threshold. int numValidPoints = Math.max(1, (int) (path.size()*0.05)); for(int i=0; i<path.size(); i++){ ClusterPathPoint point = path.get(i); if(point.velocityPPT != null){ double speed = 1000*Math.sqrt(Math.pow(point.velocityPPT.x, 2.0)+Math.pow(point.velocityPPT.y, 2.0)); if(speed >= maxSpeedThreshold_kPPT) numValidPoints } } if(numValidPoints > 0) ret = false; return ret; } /** * trims a trajectory * * @param trajectory * @param headTrimmingPercets * @param tailTrimmingPercets * @return */ protected ArrayList<ClusterPathPoint> trajectoryTrimming(ArrayList<ClusterPathPoint> trajectory, int headTrimmingPercets, int tailTrimmingPercets, double trjLength){ ArrayList<ClusterPathPoint> trimmedTrj; int numPointsHeadTrimming = 0; int numPointsTailTrimming = 0; // int numPointsHeadTrimming = (int) (trajectory.size()*0.01*headTrimmingPercets); // int numPointsTailTrimming = (int) (trajectory.size()*0.01*tailTrimmingPercets); if(headTrimmingPercets > 0) numPointsHeadTrimming = FeatureExtraction.getTrajectoryPositionForward(trajectory, trjLength*0.01*headTrimmingPercets); if(tailTrimmingPercets > 0) numPointsTailTrimming = trajectory.size() - 1 - FeatureExtraction.getTrajectoryPositionBackward(trajectory, trjLength*0.01*tailTrimmingPercets); if(numPointsHeadTrimming + numPointsTailTrimming > 0 && numPointsHeadTrimming + numPointsTailTrimming < trajectory.size()){ trimmedTrj = new ArrayList<ClusterPathPoint>(trajectory.size() - numPointsHeadTrimming - 
numPointsTailTrimming); for(int j=numPointsHeadTrimming; j<trajectory.size()-numPointsTailTrimming; j++) trimmedTrj.add(trajectory.get(j)); } else trimmedTrj = trajectory; return trimmedTrj; } /** * does low-pass filtering to smoothe the trajectory * * @param path * @return */ private ArrayList<ClusterPathPoint> lowPassFiltering(ArrayList<ClusterPathPoint> path){ ArrayList<ClusterPathPoint> lpfPath = new ArrayList<ClusterPathPoint>(path.size()); ClusterPathPoint p = (ClusterPathPoint) path.get(0).clone(); lpfPath.add(p); lpf.setInternalValue2d(path.get(0).x, path.get(0).y); for(int i=1; i<path.size(); i++){ p = (ClusterPathPoint) path.get(i).clone(); Point2D.Float pt = lpf.filter2d(p.x, p.y, p.t); p.x = pt.x; p.y = pt.y; lpfPath.add(p); } return lpfPath; } /** * returns maxSpeedThreshold_kPPT * * @return */ public float getMaxSpeedThreshold_kPPT() { return maxSpeedThreshold_kPPT; } /** sets maxSpeedThreshold_kPPT * * @param maxSpeedThreshold_kPPT */ public void setMaxSpeedThreshold_kPPT(float maxSpeedThreshold_kPPT) { float old = this.maxSpeedThreshold_kPPT; this.maxSpeedThreshold_kPPT = maxSpeedThreshold_kPPT; getPrefs().putFloat("GestureBF2D.maxSpeedThreshold_kPPT",maxSpeedThreshold_kPPT); support.firePropertyChange("maxSpeedThreshold_kPPT",old,this.maxSpeedThreshold_kPPT); } /** returns numPointsThreshold * * @return */ public int getNumPointsThreshold() { return numPointsThreshold; } /** sets numPointsThreshold * * @param numPointsThreshold */ public void setNumPointsThreshold(int numPointsThreshold) { int old = this.numPointsThreshold; this.numPointsThreshold = numPointsThreshold; getPrefs().putInt("GestureBF2D.numPointsThreshold",numPointsThreshold); support.firePropertyChange("numPointsThreshold",old,this.numPointsThreshold); } /** returns headTrimmingPercents * * @return */ public int getHeadTrimmingPercents() { return headTrimmingPercents; } /** sets headTrimmingPercents * * @param headTrimmingPercents */ public void setHeadTrimmingPercents(int 
headTrimmingPercents) { int old = this.headTrimmingPercents; this.headTrimmingPercents = headTrimmingPercents; getPrefs().putInt("GestureBF2D.headTrimmingPercents",headTrimmingPercents); support.firePropertyChange("headTrimmingPercents",old,this.headTrimmingPercents); } /** returns tailTrimmingPercents * * @return */ public int getTailTrimmingPercents() { return tailTrimmingPercents; } /** sets tailTrimmingPercents * * @param tailTrimmingPercents */ public void setTailTrimmingPercents(int tailTrimmingPercents) { int old = this.tailTrimmingPercents; this.tailTrimmingPercents = tailTrimmingPercents; getPrefs().putInt("GestureBF2D.tailTrimmingPercents",tailTrimmingPercents); support.firePropertyChange("tailTrimmingPercents",old,this.tailTrimmingPercents); } /** returns enableLPF * * @return */ public boolean isEnableLPF() { return enableLPF; } /** sets enableLPF * * @param enableLPF */ public void setEnableLPF(boolean enableLPF) { boolean old = this.enableLPF; this.enableLPF = enableLPF; getPrefs().putBoolean("GestureBF2D.enableLPF", enableLPF); support.firePropertyChange("enableLPF",old,this.enableLPF); } /** * @return the tauMs */ public float getTauPathMs (){ return tauPathMs; } /** * The lowpass time constant of the trajectory. 
* * @param tauPathMs the tauMs to set */ synchronized public void setTauPathMs (float tauPathMs){ float old = this.tauPathMs; this.tauPathMs = tauPathMs; getPrefs().putFloat("GestureBF2D.tauPathMs",tauPathMs); support.firePropertyChange("tauPathMs",old,this.tauPathMs); lpf.setTauMs(tauPathMs); } /** * returns refractoryTimeMs * * @return */ public int getRefractoryTimeMs() { return refractoryTimeMs; } /** * sets refractoryTimeMs * * @param refractoryTimeMs */ public void setRefractoryTimeMs(int refractoryTimeMs) { int old = this.refractoryTimeMs; this.refractoryTimeMs = refractoryTimeMs; getPrefs().putInt("GestureBF2D.refractoryTimeMs", refractoryTimeMs); support.firePropertyChange("refractoryTimeMs",old,this.refractoryTimeMs); } /** * Class for HMM and GUI */ class HmmDrawingPanel extends TrajectoryDrawingPanel implements ItemListener{ /** * Button names */ public final String REMOVE = "Remove"; public final String ADD = "Add"; public final String SHOW = "Show"; public final String RESET = "Reset"; public final String LEARN = "Learn"; public final String GUESS = "Guess"; /** * Optional HMM models */ public final String ERGODIC = "ERGODIC"; public final String LR = "LR"; public final String LRB = "LRB"; public final String LRC = "LRC"; public final String LRBC = "LRBC"; /** * Stores gesture names in a set to guarantee the uniqueness of names */ public HashSet<String> gestureItems = new HashSet<String>(); /** * combo box for choosing a gesture from the registered gesture set */ protected JComboBox gestureChoice; /** * combo box for choosing a HMM model */ protected JComboBox hmmModelChoice; /** * text field for entering the name of a new gesture to register */ protected JTextField newGesture; /** * for saving and loading of gesture HMM */ protected JFileChooser fileChooser; /** * make it true to manually activate gesture recognition system */ protected JCheckBoxMenuItem checkGestureAction; /** * All gestures have the same number of states. 
*/ protected int numState = 5; /** * Feature vector space consists of 16 quantized vectors. */ protected final String[] featureVectorSpace = new String[] {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15"}; /** * use dynamic threshold model. If you set 'false' instead of 'true', you can use a static threshold model. */ protected GestureHmm ghmm = new GestureHmm(featureVectorSpace, GestureHmm.GAUSSIAN_THRESHOLD); /** * Output statement buffer */ protected String msg = ""; /** * x of the center x of image panel */ protected float centerX = imgPanelWidth/2; /** * y of the center of image panel */ protected float centerY = imgPanelHeight/2; /** * size of show panel */ protected float showPanelSize = Math.min(centerX, centerY); /** * timer for image load */ protected Timer timer; /** * constructor * @param title * @param buttonNames */ public HmmDrawingPanel(String title, String[] buttonNames) { super(title, 700, 700, buttonNames); //creates a file chooser fileChooser = new JFileChooser(); // creates a timer timer = new Timer(700, clearImageAction); } @Override public void buttonLayout(String[] componentNames) { gestureChoice = new JComboBox(); gestureChoice.setName("gestureChoice"); gestureChoice.addItem("Select a gesture"); hmmModelChoice = new JComboBox(); hmmModelChoice.setName("hmmModelChoice"); hmmModelChoice.addItem("Select HMM model"); hmmModelChoice.addItem(ERGODIC); hmmModelChoice.addItem(LR); hmmModelChoice.addItem(LRB); hmmModelChoice.addItem(LRC); hmmModelChoice.addItem(LRBC); newGesture = new JTextField(); newGesture.setText("New gesture name"); // configuration of button panel buttonPanel.setLayout(new GridLayout(2, (componentNames.length+3)/2)); // adds gesture choice buttonPanel.add(gestureChoice, "1"); gestureChoice.addItemListener(this); // adds new gesture name buttonPanel.add(newGesture, "2"); // adds HMM model choice buttonPanel.add(hmmModelChoice, "3"); hmmModelChoice.addItemListener(this); // adds buttons 
JButton newButton; for(int i = 0; i< componentNames.length; i++){ newButton = new JButton(componentNames[i]); buttonPanel.add(newButton, ""+(i+4)); newButton.addActionListener(buttonActionListener); } JButton clearButton = new JButton(clearButtonName); buttonPanel.add(clearButton, ""+ (componentNames.length + 4)); clearButton.addActionListener(buttonActionListener); } @Override public void menuLayout() { // creates and adds drop down menus to the menu bar JMenu fileMenu = new JMenu("File"); menuBar.add(fileMenu); JMenu gestureMenu = new JMenu("Gesture"); menuBar.add(gestureMenu); // creates and adds menu items to menus JMenuItem newAction = new JMenuItem("New"); JMenuItem loadAction = new JMenuItem("Load"); JMenuItem saveAction = new JMenuItem("Save"); fileMenu.add(newAction); fileMenu.add(loadAction); fileMenu.add(saveAction); // Create and add CheckButton for enabling gesture recognition checkGestureAction = new JCheckBoxMenuItem("Activates Gesture Recognition"); checkGestureAction.setState(login); gestureMenu.add(checkGestureAction); // add action listeners newAction.addActionListener(menuActionListener); loadAction.addActionListener(menuActionListener); saveAction.addActionListener(menuActionListener); checkGestureAction.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { AbstractButton aButton = (AbstractButton) e.getSource(); if(aButton.getModel().isSelected()){ login = true; System.out.println("Gesture recognition is mannually activated."); }else{ login = false; System.out.println("Gesture recognition is mannually Inactivated."); } clearImage(); } }); } @Override public void buttonAction(String buttonName) { if(buttonName.equals(LEARN)){ doLearn(); clearImage(); } else if(buttonName.equals(ADD)){ doAddGesture(); } else if(buttonName.equals(REMOVE)){ doRemoveGesture(); } else if(buttonName.equals(GUESS)){ doGuess(); } else if(buttonName.equals(RESET)){ doReset(); clearImage(); } else if(buttonName.equals(SHOW)){ 
doShow(); } } @Override @SuppressWarnings("CallToThreadDumpStack") public void menuAction(String menuName) { if(menuName.equals("New")){ doNew(); } else if(menuName.equals("Load")){ try{ doLoad(); } catch(ClassNotFoundException e){ e.printStackTrace(); } } else if(menuName.equals("Save")){ doSave(); } repaint(); } /** * excutes Remove button */ public void doRemoveGesture(){ String gesName = (String) gestureChoice.getSelectedItem(); if(gesName == null || gesName.equals("") || gesName.equals("Select a gesture")){ System.out.println("Warning: Gesture is not selected."); return; } ghmm.removeGesture(gesName); gestureChoice.removeItem(gesName); gestureItems.remove(gesName); System.out.println(gesName + " was removed."); } /** * excutes Add button */ public void doAddGesture(){ String newGestName = newGesture.getText(); if(newGestName.equals("")){ System.out.println("Warning: Gesture name is not specified."); return; } if(((String) hmmModelChoice.getSelectedItem()).startsWith("Select HMM model")) { System.out.println("Warning: HMM model is not specified."); return; } String gestName = newGestName+"_"+hmmModelChoice.getSelectedItem(); if(!gestureItems.contains(gestName)){ gestureItems.add(gestName); gestureChoice.addItem(gestName); HiddenMarkovModel.ModelType selectedModel; if(hmmModelChoice.getSelectedItem().equals("ERGODIC")) selectedModel = HiddenMarkovModel.ModelType.ERGODIC_RANDOM; else if(hmmModelChoice.getSelectedItem().equals("LR")) selectedModel = HiddenMarkovModel.ModelType.LR_RANDOM; else if(hmmModelChoice.getSelectedItem().equals("LRB")) selectedModel = HiddenMarkovModel.ModelType.LRB_RANDOM; else if(hmmModelChoice.getSelectedItem().equals("LRC")) selectedModel = HiddenMarkovModel.ModelType.LRC_RANDOM; else if(hmmModelChoice.getSelectedItem().equals("LRBC")) selectedModel = HiddenMarkovModel.ModelType.LRBC_RANDOM; else{ System.out.println("Warning: Failed to add a new gesture."); return; } ghmm.addGesture(gestName, numState, selectedModel); 
ghmm.initializeGestureRandom(gestName); System.out.println("A new gesture ("+ gestName + ") is added."); } gestureChoice.setSelectedItem(gestName); newGesture.setText(""); } /** * excutes Learn button */ public void doLearn(){ String gesName = (String) gestureChoice.getSelectedItem(); if(gesName == null || gesName.equals("") || gesName.equals("Select a gesture")){ System.out.println("Warning: Gesture is not selected."); return; } String[] fv = fve.convTrajectoryToCodewords(trajectory); if(fv[0] == null){ System.out.println("Warning: No trajectory is dected."); return; } System.out.println("Learning " + gesName); boolean learningSuccess; HiddenMarkovModel.ModelType modelType = ghmm.getGestureHmm(gesName).getModelType(); // for LRC & LRBC, we don't have to update start probability if(modelType == HiddenMarkovModel.ModelType.LRC_RANDOM || modelType == HiddenMarkovModel.ModelType.LRBC_RANDOM) learningSuccess = ghmm.learnGesture(gesName, fv, fve.vectorAngleSeq, false, true, true); else learningSuccess = ghmm.learnGesture(gesName, fv, fve.vectorAngleSeq, true, true, true); if(learningSuccess){ if(ghmm.getGestureHmm(gesName).getNumTraining() == 1) System.out.println(gesName+" is properly registered. Log{P(O|model)} = " + Math.log10(ghmm.getGestureLikelyhood(gesName, fv))); else if(ghmm.getGestureHmm(gesName).getNumTraining() == 2) System.out.println(gesName+" has been trained twice. Log{P(O|model)} = " + Math.log10(ghmm.getGestureLikelyhood(gesName, fv))); else System.out.println(gesName+" has been trained " + ghmm.getGestureHmm(gesName).getNumTraining() + " times. 
Log{P(O|model)} = " + Math.log10(ghmm.getGestureLikelyhood(gesName, fv))); // ghmm.printGesture(gesName); // ghmm.printThresholdModel(); // ghmm.getGestureHmm(gesName).viterbi(fv); // System.out.println("Viterbi path : " + ghmm.getGestureHmm(gesName).getViterbiPathString(fv.length)); } } /** * excutes Guess button */ public void doGuess(){ String[] fv = fve.convTrajectoryToCodewords(trajectory); if(fv[0] == null){ System.out.println("Warning: No trajectory is dected."); return; } String bmg = ghmm.getBestMatchingGesture(fv, fve.vectorAngleSeq); gImg.setFont(new Font("Arial", Font.PLAIN, 24)); // erase previous message Color tmpColor = getColor(); gImg.setColor(this.getBackground()); gImg.drawString(msg, 40 + imgPanelWidth/2 - msg.length()*12/2, imgPanelHeight - 20); gImg.setColor(tmpColor); if(bmg == null){ msg = "No gesture is found."; System.out.println(msg); }else{ msg = String.format("Best matching gesture is %s", bmg); System.out.println(msg +" with probability "+Math.log10(ghmm.getGestureLikelyhood(bmg, fv))); // ghmm.getGestureHmm(bmg).viterbi(fv); // System.out.println("Viterbi path : " + ghmm.getGestureHmm(bmg).getViterbiPathString(fv.length)); } gImg.drawString(msg, 40 + imgPanelWidth/2 - msg.length()*12/2, imgPanelHeight - 20); repaint(); resetTrajectory(); } /** * excutes Show button */ public void doShow(){ String gesName = (String) gestureChoice.getSelectedItem(); if(gesName == null || gesName.equals("") || gesName.equals("Select a gesture")){ System.out.println("Warning: Gesture is not selected."); return; } double[] meanFVarray = ghmm.getAverageFeaturesToArray(gesName); clearImage(); // draws frame int margin = 30; int shadow = 10; Color tmp = getColor(); gImg.setColor(Color.DARK_GRAY); gImg.fillRect((int) (centerX - showPanelSize/2) - margin + shadow, (int) (centerY - showPanelSize/2) - margin + shadow, (int) showPanelSize + 2*margin, (int) showPanelSize + 2*margin); gImg.setColor(Color.WHITE); gImg.fillRect((int) (centerX - showPanelSize/2) - 
margin, (int) (centerY - showPanelSize/2) - margin, (int) showPanelSize + 2*margin, (int) showPanelSize + 2*margin); gImg.setColor(Color.BLACK); gImg.drawRect((int) (centerX - showPanelSize/2) - margin, (int) (centerY - showPanelSize/2) - margin, (int) showPanelSize + 2*margin, (int) showPanelSize + 2*margin); gImg.setFont(new Font("Arial", Font.PLAIN, 24)); gImg.drawString(gesName+" (# of training: "+ghmm.getGestureHmm(gesName).getNumTraining()+")", (int) centerX - (int) showPanelSize/2 - margin, (int) centerY - (int) showPanelSize/2 - margin - 10); gImg.setColor(tmp); // draws trajectory if(ghmm.getGestureHmm(gesName).getNumTraining() > 0) drawTrajectory(FeatureExtraction.convAnglesToTrajectoryInScaledArea(new Point2D.Float(centerX, centerY), showPanelSize, meanFVarray)); else gImg.drawString("Hey, man.", (int) centerX - 50, (int) centerY); repaint(); } /** * excutes Reset button */ public void doReset(){ String gesName = (String) gestureChoice.getSelectedItem(); if(gesName == null || gesName.equals("") || gesName.equals("Select a gesture")){ System.out.println("Warning: Gesture is not selected."); return; } ghmm.resetGesture(gesName); System.out.println(gesName + " is reset now."); } /** * excutes New menu */ public void doNew(){ ghmm = new GestureHmm(featureVectorSpace, GestureHmm.GAUSSIAN_THRESHOLD); gestureItems.clear(); gestureChoice.removeAllItems(); gestureChoice.addItem("Select a gesture"); System.out.println("Created a new gesture set."); } /** * excutes Save menu */ @SuppressWarnings("CallToThreadDumpStack") public void doSave(){ int returnVal = fileChooser.showSaveDialog(HmmDrawingPanel.this); if (returnVal == JFileChooser.APPROVE_OPTION) { File file = fileChooser.getSelectedFile(); // do saving things here try{ FileOutputStream fos = new FileOutputStream(file.getAbsoluteFile()); BufferedOutputStream bos = new BufferedOutputStream(fos); ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(ghmm); oos.close(); log.log(Level.WARNING, 
"Gesture HMM has been saved in {0}", file.getAbsoluteFile()); } catch (IOException e){ e.printStackTrace(); } } else { // canceled } } /** * excutes Save menu * * @throws ClassNotFoundException */ @SuppressWarnings("CallToThreadDumpStack") public void doLoad() throws ClassNotFoundException{ int returnVal = fileChooser.showOpenDialog(HmmDrawingPanel.this); if (returnVal == JFileChooser.APPROVE_OPTION) { File file = fileChooser.getSelectedFile(); // do loading things here try{ FileInputStream fis = new FileInputStream(file.getAbsoluteFile()); BufferedInputStream bis = new BufferedInputStream(fis); ObjectInputStream ois = new ObjectInputStream(bis); ghmm = (GestureHmm) ois.readObject(); gestureItems.clear(); gestureItems.addAll(ghmm.getGestureNames()); gestureChoice.removeAllItems(); gestureChoice.addItem("Select a gesture"); for(String gname:gestureItems) gestureChoice.addItem(gname); ois.close(); log.log(Level.WARNING, "Gesture HMM has been loaded in {0}", file.getAbsoluteFile()); // String[] bestSeq = new String[] {"6", "15", "10", "3", "13", "6", "15", "10", "3", "13", "6", "15", "10", "3", "13", "6"}; // String[] idealSeq = new String[] {"12", "13", "14", "15", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"}; // String[] localSeq = new String[] {"12", "13", "14", "15", "0", "1", "2", "3", "13", "14", "15", "0", "1", "2", "3", "4"}; // System.out.println("bestSeq = " + ghmm.getGestureHmm("CW_LRBC").forward(bestSeq)); // System.out.println("idealSeq = " + ghmm.getGestureHmm("CW_LRBC").forward(idealSeq)); // System.out.println("localSeq = " + ghmm.getGestureHmm("CW_LRBC").forward(localSeq)); } catch (IOException e){ e.printStackTrace(); } } else { // canceled } } /** * puts an image on the drawing panel * * @param img */ public void putImage(Image img){ clearImage(); gImg.drawImage(img, (int) centerX - img.getWidth(this)/2, (int) centerY - img.getHeight(this)/2, this); repaint(); if(timer.isRunning()) timer.restart(); else timer.start(); } /** * action 
listener for timer events */ ActionListener clearImageAction = new ActionListener() { @Override public void actionPerformed(ActionEvent evt) { clearImage(); timer.stop(); } }; @Override protected void initialDeco() { super.initialDeco(); Color tmpColor = getColor(); Font tmpFont = getFont(); gImg.setFont(new Font("Arial", Font.BOLD|Font.ITALIC, 20)); if(login){ gImg.setColor(Color.RED); gImg.drawString("Active", imgPanelWidth - 100, 20); }else{ gImg.setColor(Color.GRAY); gImg.drawString("Inactive", imgPanelWidth - 100, 20); } gImg.setColor(tmpColor); gImg.setFont(tmpFont); } /** * processes Choice events * @param e */ @Override public void itemStateChanged(ItemEvent e) { if(String.valueOf(e.getSource()).contains("gestureChoice")){ if(e.getStateChange() == ItemEvent.SELECTED && !String.valueOf(e.getItem()).equals("Select a gesture")){ System.out.println("Gesture selection : " + e.getItem() + " is selected."); } } else { if(e.getStateChange() == ItemEvent.SELECTED && !String.valueOf(e.getItem()).equals("Select HMM model")){ System.out.println("HMM model selection: " + e.getItem() + " is selected."); } } } @Override public void windowClosing(WindowEvent we) { // set the window just invisible hmmDP.setVisible(false); } } /** * returns true if login is true * * @return */ public boolean isLogin() { return login; } /** * Definition of after-gesture processes */ protected void doLogin(){ hmmDP.putImage(imgHi); login = true; hmmDP.checkGestureAction.setState(login); } protected void doLogout(){ hmmDP.putImage(imgBye); login = false; hmmDP.checkGestureAction.setState(login); } protected void doPush(){ // for stereo vision // hmmDP.putImage(imgPush); } protected boolean doSlashUp(ArrayList<ClusterPathPoint> path){ boolean ret = true; // checks over-segmentation if(!checkActivated && prevPath != null){ System.out.print("Check the previous segment if(tryGestureWithPrevPath(path, 60, "Check", checkActivationTimeUs)) { System.out.println("Check"); checkActivated = true; } else { 
System.out.println("null"); ret = false; } } if(ret && checkActivated) hmmDP.putImage(imgCheck); checkActivated = false; return ret; } protected void doSlashDown(){ checkActivated = true; } protected void doCW(ArrayList<ClusterPathPoint> path){ // to detect broken infinite shaped gestures if(tryGestureWithPrevPath(path, 0, "Infinite", checkActivationTimeUs)){ System.out.println("----> might be an infinite-shaped gesture"); doLogout(); } else hmmDP.putImage(imgCW); } protected void doCCW(ArrayList<ClusterPathPoint> path){ // to detect broken infinite shaped gestures if(tryGestureWithPrevPath(path, 0, "Infinite", checkActivationTimeUs)){ System.out.println("----> might be an infinite-shaped gesture"); doLogout(); } else hmmDP.putImage(imgCCW); } protected void doLeft(){ hmmDP.putImage(imgLeft); } protected void doRight(){ hmmDP.putImage(imgRight); } protected void doUp(){ // if(checkActivated && startTimeGesture <= endTimePrevGesture + checkActivationTimeUs) // hmmDP.putImage(imgCheck); // else hmmDP.putImage(imgUp); } protected void doDown(){ hmmDP.putImage(imgDown); } protected void doCheck(){ hmmDP.putImage(imgCheck); } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package ch.unizh.ini.jaer.projects.neuralnets;

import java.io.*;
import java.text.DecimalFormat;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JPanel;
import jspikestack.AxonSparse;
import jspikestack.EngineeringFormat;
import jspikestack.KernelMaker2D;
import jspikestack.KernelMaker2D.FloatConnection;
import net.sf.jaer.Description;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.event.OutputEventIterator;
import net.sf.jaer.event.PolarityEvent;
import net.sf.jaer.eventprocessing.EventFilter2D;
import net.sf.jaer.eventprocessing.MultiSourceProcessor;
import net.sf.jaer.graphics.ImageDisplay;

/**
 * This filter attempts to fill in background shading information by
 * remembering the most recent events that passed. For instance, if an edge
 * passes, indicating a transition from light to dark, and you've got no ON
 * events since then, it's a good assumption that things are still dark.
 *
 * The implementation is reasonably biologically plausible. It uses an array of
 * Leaky Integrate-and-Fire neurons (one neuron per pixel). The recent shading
 * information is stored in the form of slowly decaying post-synaptic currents.
 * When an edge passes, it adds to these currents. The currents continue to
 * stimulate a neuron long after the edge passes.
 *
 * @author Peter O'Connor
 */
@Description("Reconstructs an Estimation of the Background Light level based on the streams of input events as a lowpass filter.")
public class SlowResponse extends EventFilter2D {

    int dimx;
    int dimy;
    private float contrast = 1;
    ImageDisplay im;
    /** Per-pixel neuron state; state1 is the "current" buffer, state2 a scratch buffer. */
    public float[] state1;
    public float[] state2;
    /** Timestamp (us) at which each unit's state was last decayed. */
    public int[] lastUpdateTime;
    private boolean applyLateralKernel = true;
    private boolean applyForwardKernel = false;
    public LoneKernelController forwardKernelControl;
    public LoneKernelController lateralKernelControl;
    private boolean built = false;
    /** Decay time constant in microseconds (persisted preference). */
    private float timeConst = getFloat("timeConst", 500000);
    AxonSparse.KernelController kernelControl;
    DataOutputStream dos;
    public boolean recordToFile;
    float[][] fwdKernel;
    int[][] fwdTargets;
    float[][] autoKernel;
    int[][] autoTargets;
    boolean isTimeInitialized = false;

    public float getTimeConst() {
        return timeConst;
    }

    public void setTimeConst(float tcEPSC) {
        this.timeConst = tcEPSC;
        updateEpscScale();
    }

    float epscDecayRate;

    /** Scale EPSCs so each one, overall, has an area of 1 under it. */
    public void updateEpscScale() {
        epscDecayRate = 1 / timeConst;
    }

    /**
     * Decay every unit's state to {@code toTime}, then optionally smooth
     * the state through the lateral kernel (buffers are swapped rather than
     * copied).
     */
    public void updateState(int toTime) {
        for (int i = 0; i < state1.length; i++) {
            updateState(toTime, i);
        }
        if (isApplyLateralKernel()) {
            KernelMaker2D.weightMult(state1, autoKernel, autoTargets, state2);
            float[] swap = state1;
            state1 = state2;
            state2 = swap;
        }
    }

    /** Exponentially decay one unit's state from its last update time to {@code toTime}. */
    public void updateState(int toTime, int ixUnit) {
        state1[ixUnit] *= Math.exp((lastUpdateTime[ixUnit] - toTime) / timeConst);
        lastUpdateTime[ixUnit] = toTime;
    }

    /**
     * Inject one event into the unit array, either directly (one unit per
     * pixel) or spread through the forward kernel.
     */
    public void fireEventToUnit(PolarityEvent evin) {
        if (applyForwardKernel) {
            int ix = evin.y + dimy * evin.x;
            for (int i = 0; i < fwdTargets[ix].length; i++) {
                updateState(evin.timestamp, fwdTargets[ix][i]);
                // NOTE(review): this accumulates every kernel tap into state1[ix]
                // rather than state1[fwdTargets[ix][i]] — looks suspicious, but
                // behavior is preserved here; confirm intent before changing.
                state1[ix] += evin.getPolarity() == PolarityEvent.Polarity.On ? fwdKernel[ix][i] : -fwdKernel[ix][i];
            }
        } else {
            int addr = dim2addr(evin.x, evin.y);
            updateState(evin.timestamp, addr);
            state1[addr] += evin.getPolarity() == PolarityEvent.Polarity.On ? 1 : -1;
        }
    }

    public SlowResponse(AEChip chip) {
        super(chip);
        setPropertyTooltip("thresh", "Firing threshold of slow-response neurons");
        setPropertyTooltip("tcEPSC", "Time-Constant of the EPSCs, in microseconds. Longer TC means shading is more persistent");
        setPropertyTooltip("tcMem", "Time-Constant of the neuron membrane potentials, in microseconds.");
        setPropertyTooltip("eventBased", "Generate events indicating slow-response activity.");
        updateEpscScale();
    }

    @Override
    public EventPacket<?> filterPacket(EventPacket<?> in) {
        // Pass packets through untouched until the display/arrays are built.
        if (!isBuilt())
            return in;
        else if (!isTimeInitialized) // first packet: seed all update times
        {
            if (in.isEmpty())
                return in;
            initNeuronsOnTimeStamp(in.getFirstTimestamp());
            isTimeInitialized = true;
        }

        // Fire events into the units.
        for (Object ev : in) {
            fireEventToUnit((PolarityEvent) ev);
        }

        // Update the state and display.
        int time = in.getLastTimestamp();
        updateState(time);
        updateDisplay();

        // If requested, write to file.
        if (recordToFile)
            this.writeCurrentFrame();

        return in;
    }

    public boolean isBuilt() {
        return built;
    }

    @Override
    public void resetFilter() {
        removeDisplays();
        // NOTE(review): rebuild only happens once time was initialized — confirm
        // this asymmetry is intended.
        if (isTimeInitialized)
            build();
        isTimeInitialized = false;
    }

    public void doStartDisplay() {
        build();
        initDisplay();
    }

    /** Allocate the per-pixel state arrays for the current chip size and build kernels. */
    public void build() {
        dimx = getChip().getSizeX();
        dimy = getChip().getSizeY();
        int nUnits = dimx * dimy;
        state1 = new float[nUnits];
        state2 = new float[nUnits];
        lastUpdateTime = new int[nUnits];
        buildKernel();
        built = true;
    }

    /** Build the lateral (Gaussian) and forward (Mexican-hat) connection kernels. */
    public void buildKernel() {
        // Lateral Kernel
        KernelMaker2D.Gaussian kern = new KernelMaker2D.Gaussian();
        kern.majorWidth = 1f;
        if (lateralKernelControl == null)
            lateralKernelControl = new LateralKernelController();
        lateralKernelControl.setKernelControl(kern, 3, 3);
        lateralKernelControl.doApply_Kernel();
        this.addControls(lateralKernelControl.getControl());

        // Forward Kernel
        KernelMaker2D.MexiHat kernf = new KernelMaker2D.MexiHat();
        kernf.mag = 1;
        kernf.ratio = 3;
        kernf.majorWidth = 3;
        if (forwardKernelControl == null)
            forwardKernelControl = new ForwardKernelController();
        forwardKernelControl.setKernelControl(kernf, 5, 5);
        forwardKernelControl.doApply_Kernel();
        this.addControls(forwardKernelControl.getControl());
    }

    public float getContrast() {
        return contrast;
    }

    public void setContrast(float contrast) {
        this.contrast = contrast;
    }

    public boolean isApplyForwardKernel() {
        return applyForwardKernel;
    }

    public void setApplyForwardKernel(boolean applyForwardKernel) {
        this.applyForwardKernel = applyForwardKernel;
    }

    /** Controller that inverts the GUI-edited kernel into forward (input->unit) weights. */
    public class ForwardKernelController extends LoneKernelController {

        @Override
        public void doApply_Kernel() {
            float[][] ww = get2DKernel();
            FloatConnection conn = KernelMaker2D.invert(ww, chip.getSizeX(), chip.getSizeY(), SlowResponse.this.dimx, SlowResponse.this.dimy);
            fwdKernel = conn.weights;
            fwdTargets = conn.targets;
        }

        @Override
        public String getName() {
            return "Forward Kernel";
        }
    }

    /** Controller that inverts the GUI-edited kernel into lateral (unit->unit) weights. */
    public class LateralKernelController extends LoneKernelController {

        @Override
        public void doApply_Kernel() {
            float[][] ww = get2DKernel();
            KernelMaker2D.normalizeKernel(ww);
            FloatConnection conn = KernelMaker2D.invert(ww, SlowResponse.this.dimx, SlowResponse.this.dimy, SlowResponse.this.dimx, SlowResponse.this.dimy);
            autoKernel = conn.weights;
            autoTargets = conn.targets;
        }

        @Override
        public String getName() {
            return "Lateral Kernel";
        }
    }

    public void initDisplay() {
        im = ImageDisplay.createOpenGLCanvas();
        im.setImageSize(dimx, dimy);
        im.setSize(400, 400);
        JPanel p = new JPanel();
        p.add(im);
        this.addDisplay(p);
    }

    float displayMin;
    float displayMax;
    float displayAdaptationRate = 0.1f;
    boolean displaySymmetric = true;
    final EngineeringFormat myFormatter = new EngineeringFormat();

    /**
     * Render the current state to the image display, adapting the display
     * range slowly towards the observed min/max.
     */
    public void updateDisplay() {
        if (im == null) {
            return;
        }

        float minAct = Float.MAX_VALUE;
        // BUGFIX: was Float.MIN_VALUE, which is the smallest POSITIVE float —
        // with all-negative states the running max never updated. Use the
        // true lower bound instead.
        float maxAct = -Float.MAX_VALUE;

        float del = (displayMax - displayMin) / contrast;
        float bottom = displayMin / contrast;
        for (int i = 0; i < state1.length; i++) {
            float vmem = state1[i];
            float level = (vmem - bottom) / del;
            im.setPixmapGray(i / dimy, i % dimy, level);
            minAct = minAct < vmem ? minAct : vmem;
            maxAct = maxAct > vmem ? maxAct : vmem;
        }

        if (displaySymmetric) {
            float absmax = Math.max(Math.abs(minAct), Math.abs(maxAct));
            minAct = -absmax;
            maxAct = absmax;
        }

        if (displayMin == 0 && displayMax == 0) { // first frame: snap to observed range
            displayMin = minAct;
            displayMax = maxAct;
        } else { // otherwise low-pass the range
            displayMin = displayAdaptationRate * minAct + (1 - displayAdaptationRate) * displayMin;
            displayMax = displayAdaptationRate * maxAct + (1 - displayAdaptationRate) * displayMax;
        }

        im.setTitleLabel("range: [" + myFormatter.format(minAct) + " " + myFormatter.format(maxAct) + "]");
        im.repaint();
    }

    @Override
    public void initFilter() {
    }

    /** Seed every unit's last-update time with the first packet's timestamp. */
    void initNeuronsOnTimeStamp(int timestamp) {
        for (int i = 0; i < lastUpdateTime.length; i++) {
            lastUpdateTime[i] = timestamp;
        }
    }

    /** Map a pixel coordinate to a flat unit index (column-major, y fastest). */
    public int dim2addr(short xloc, short yloc) {
        return yloc + xloc * dimy;
    }

    /** Pop up a save-file chooser; may return null if the user cancels. */
    public File selectFile() {
        this.setFilterEnabled(false);
        JFileChooser fileChooser = new JFileChooser(System.getProperty("user.dir"));
        int returnVal = fileChooser.showSaveDialog(null);
        File file = fileChooser.getSelectedFile();
        this.setFilterEnabled(true);
        return file;
    }

    /**
     * Open the recording file and write the dimension header. If
     * {@code thisFile} is null the user is prompted; a cancelled prompt
     * leaves {@code dos} null.
     */
    public void initFile(File thisFile) {
        if (thisFile == null) {
            thisFile = selectFile();
        }
        if (thisFile == null)
            return;
        try {
            FileOutputStream fos = new FileOutputStream(thisFile);
            dos = new DataOutputStream(fos);
            dos.writeInt(dimx);
            dos.writeInt(dimy);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(SlowResponse.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(SlowResponse.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Write the current frame to file. Assumes state has been updated already.
     */
    public void writeCurrentFrame() {
        if (dos == null)
            initFile(null);
        // BUGFIX: initFile may fail or be cancelled, leaving dos null; the old
        // code then NPE'd on the first writeFloat.
        if (dos == null)
            return;
        try {
            for (int i = 0; i < state1.length; i++) {
                dos.writeFloat(state1[i]);
            }
        } catch (IOException ex) {
            setRecordToFile(false);
            Logger.getLogger(SlowResponse.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    public boolean isApplyLateralKernel() {
        return applyLateralKernel;
    }

    public void setApplyLateralKernel(boolean doSmoothing) {
        this.applyLateralKernel = doSmoothing;
    }

    public boolean isRecordToFile() {
        return recordToFile;
    }

    /**
     * Enable/disable frame recording; disabling closes the output stream.
     */
    public void setRecordToFile(boolean recordToFile) {
        this.recordToFile = recordToFile;
        // BUGFIX: dos is null when recording was never started — the old code
        // NPE'd here. Also null the reference even if close() throws.
        if (!recordToFile && dos != null) {
            try {
                dos.close();
            } catch (IOException ex) {
                Logger.getLogger(SlowResponse.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                dos = null;
            }
        }
    }
}
package cn.ecailan.esy.activity; import android.app.Activity; import android.content.Intent; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.view.View; import android.widget.AdapterView; import android.widget.Button; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.TextView; import org.json.JSONObject; import org.ry8.CeaFrame.model.BusinessResponse; import org.ry8.CeaFrame.theme.ResourcesFactory; import org.ry8.external.androidquery.callback.AjaxStatus; import java.util.ArrayList; import java.util.List; import cn.ecailan.esy.R; import cn.ecailan.esy.adapter.PlantSelecAreaAdapter; import cn.ecailan.esy.adapter.PlantSelectProductAdapter; import cn.ecailan.esy.model.SendMsgRequest; public class PlantSelectProductActivity extends Activity implements BusinessResponse{ private TextView title; private FrameLayout nav_bar; private LinearLayout backUpPage; private SendMsgRequest sendMsgRequest; private ListView listView; private PlantSelectProductAdapter plantSelectProductAdapter; private String areaidg; private String areaname; private String productId; private String productName; private String tranco; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.plant_product_layout); areaidg = getIntent().getStringExtra("areaidg"); areaname = getIntent().getStringExtra("areaname"); sendMsgRequest = new SendMsgRequest(this); sendMsgRequest.addResponseListener(this); sendMsgRequest.getProduct(); initView(); } private void initView() { title = (TextView)findViewById(R.id.topview_title); title.setText(""); nav_bar = (FrameLayout)findViewById(R.id.nav_bar); Drawable drawable = ResourcesFactory.getDrawable(getResources(), R.drawable.nav_background); if (null != drawable) { nav_bar.setBackgroundDrawable(drawable); } backUpPage = (LinearLayout)findViewById(R.id.close); 
backUpPage.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { finish(); } }); listView =(ListView)findViewById(R.id.listView); plantSelectProductAdapter = new PlantSelectProductAdapter(this,sendMsgRequest.arrayProductList); listView.setAdapter(plantSelectProductAdapter); listView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { productName = sendMsgRequest.arrayProductList.get(position).get("name").toString(); productId = sendMsgRequest.arrayProductList.get(position).get("productidg").toString(); tranco = areaname + "," + productName; sendMsgRequest.addPlant(tranco,areaidg,productId); finish(); } }); } @Override public void OnMessageResponse(String url, JSONObject jo, AjaxStatus status) { if ("add_plant".equals(status.getRequestCode())){ } } }
package com.dexvis.dex.task.tablemanipulation;

import org.simpleframework.xml.Root;

import com.dexvis.dex.exception.DexException;
import com.dexvis.dex.wf.DexTask;
import com.dexvis.dex.wf.DexTaskState;

/**
 * Table-manipulation task that prepends a 1-based row-index column named
 * "RI" to the current table.
 */
@Root
public class AddRowNumber extends DexTask {

    /**
     * Registers the task under the "Table Manipulation" category with its
     * help page. Execution does not update the UI.
     *
     * @throws DexException propagated from the DexTask constructor
     */
    public AddRowNumber() throws DexException {
        super("Table Manipulation", "Add Row Number",
                "table_manipulation/AddRowNumber.html");
        getMetaData().setTaskExecutionUpdatesUI(false);
    }

    /**
     * Inserts the "RI" header at column 0 and a 1-based row number at the
     * front of every data row, then passes the state through.
     *
     * @param state workflow state whose table is modified in place
     * @return the same state, with the index column added
     * @throws DexException declared for interface compatibility
     */
    public DexTaskState execute(DexTaskState state) throws DexException {
        state.getDexData().getHeader().add(0, "RI");

        // Row count is invariant while adding a column to each row, so hoist it.
        int numRows = state.getDexData().getData().size();
        for (int row = 0; row < numRows; row++) {
            // Idiomatic conversion instead of "" + (row + 1).
            state.getDexData().getData().get(row).add(0, Integer.toString(row + 1));
        }
        return state;
    }
}
package com.ibm.nmon.gui.interval;

import javax.swing.AbstractAction;

import java.awt.event.ActionEvent;
import java.awt.Component;

import javax.swing.JOptionPane;

import com.ibm.nmon.gui.main.NMONVisualizerGui;

/**
 * Action listener that removes all intervals from the IntervalManager.
 */
public final class RemoveAllIntervalsAction extends AbstractAction {
    private static final long serialVersionUID = -4929882417533803013L;

    private final NMONVisualizerGui gui;
    /** Parent component used to anchor the confirmation dialog. */
    private final Component parent;

    public RemoveAllIntervalsAction(NMONVisualizerGui gui, Component parent) {
        this.gui = gui;
        this.parent = parent;
    }

    /**
     * Asks the user for confirmation and, on "Yes", clears every interval.
     * Does nothing when there are no intervals to remove.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        if (gui.getIntervalManager().getIntervalCount() > 0) {
            // BUGFIX: the dialog uses YES_NO_OPTION, so the result must be
            // compared to YES_OPTION. The old comparison to OK_OPTION only
            // worked because OK_OPTION happens to equal YES_OPTION (both 0).
            if (JOptionPane.showConfirmDialog(parent, "Are you sure?", "Remove All Intervals",
                    JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) {
                gui.getIntervalManager().clearIntervals();
            }
        }
    }
}
package com.interview.dynamic; public class LongestIncreasingSubsequence { /** * DP way of solving LIS */ public int longestSubsequenceWithActualSolution(int arr[]){ int T[] = new int[arr.length]; int actualSolution[] = new int[arr.length]; for(int i=0; i < arr.length; i++){ T[i] = 1; actualSolution[i] = i; } for(int i=1; i < arr.length; i++){ for(int j=0; j < i; j++){ if(arr[i] > arr[j]){ if(T[j] + 1 > T[i]){ T[i] = T[j] + 1; //set the actualSolution to point to guy before me actualSolution[i] = j; } } } } //find the index of max number in T int maxIndex = 0; for(int i=0; i < T.length; i++){ if(T[i] > T[maxIndex]){ maxIndex = i; } } //lets print the actual solution int t = maxIndex; int newT = maxIndex; do{ t = newT; System.out.print(arr[t] + " "); newT = actualSolution[t]; }while(t != newT); System.out.println(); return T[maxIndex]; } /** * Recursive way of solving LIS */ public int longestSubsequenceRecursive(int arr[]){ int maxLen = 0; for(int i=0; i < arr.length-1; i++){ int len = longestSubsequenceRecursive(arr,i+1,arr[i]); if(len > maxLen){ maxLen = len; } } return maxLen + 1; } private int longestSubsequenceRecursive(int arr[], int pos, int lastNum){ if(pos == arr.length){ return 0; } int t1 = 0; if(arr[pos] > lastNum){ t1 = 1 + longestSubsequenceRecursive(arr, pos+1, arr[pos]); } int t2 = longestSubsequenceRecursive(arr, pos+1, lastNum); return Math.max(t1, t2); } public static void main(String args[]){ LongestIncreasingSubsequence lis = new LongestIncreasingSubsequence(); int arr[] = {23,10,22,5,33,8,9,21,50,41,60,80,99, 22,23,24,25,26,27}; int result = lis.longestSubsequenceWithActualSolution(arr); int result1 = lis.longestSubsequenceRecursive(arr); System.out.println(result); System.out.println(result1); } }
package com.quollwriter.editors.ui.sidebars; import java.awt.Dimension; import java.awt.Component; import java.awt.Font; import java.awt.Color; import java.awt.event.*; import java.awt.Image; import java.awt.image.*; import javax.swing.*; import javax.swing.border.*; import javax.swing.event.*; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.ArrayList; import java.util.Iterator; import java.util.Date; import java.util.Collection; import java.util.Set; import java.util.Vector; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.LinkedHashMap; import org.jivesoftware.smack.*; import org.jivesoftware.smack.packet.*; import com.jgoodies.forms.builder.*; import com.jgoodies.forms.factories.*; import com.jgoodies.forms.layout.*; import com.gentlyweb.utils.*; import com.quollwriter.data.*; import com.quollwriter.data.editors.*; import com.quollwriter.*; import com.quollwriter.ui.*; import com.quollwriter.ui.sidebars.*; import com.quollwriter.ui.panels.*; import com.quollwriter.db.*; import com.quollwriter.events.*; import com.quollwriter.ui.components.ActionAdapter; import com.quollwriter.ui.components.ChangeAdapter; import com.quollwriter.ui.components.Header; import com.quollwriter.ui.components.FormItem; import com.quollwriter.ui.components.DnDTabbedPane; import com.quollwriter.ui.components.TabHeader; import com.quollwriter.ui.components.ScrollableBox; import com.quollwriter.ui.components.QPopup; import com.quollwriter.editors.*; import com.quollwriter.editors.messages.*; import com.quollwriter.editors.ui.*; import com.quollwriter.ui.events.*; public class EditorsSideBar extends AbstractSideBar implements EditorChangedListener, EditorMessageListener, UserOnlineStatusListener { public static final String NAME = "editors"; private DnDTabbedPane tabs = null; private EditorFindPanel editorFindPanel = null; private JComponent noEditors = null; private JComponent firstLogin = null; private JButton statusButton = 
null; private JComponent notification = null; private EditorsSection otherEditors = null; private EditorsSection invitesForMe = null; private EditorsSection invitesIveSent = null; private Map<String, JComponent> specialTabs = new HashMap (); public EditorsSideBar (AbstractProjectViewer v) { super (v); EditorsEnvironment.addEditorChangedListener (this); EditorsEnvironment.addEditorMessageListener (this); EditorsEnvironment.addUserOnlineStatusListener (this); } @Override public void handleMessage (EditorMessageEvent ev) { this.updateView (); } @Override public void editorChanged (EditorChangedEvent ev) { EditorEditor ed = ev.getEditor (); this.updateView (); for (int i = 0; i < this.tabs.getTabCount (); i++) { Component comp = this.tabs.getComponentAt (i); if (comp instanceof EditorPanel) { if (ed == ((EditorPanel) comp).getEditor ()) { JLabel th = (JLabel) this.tabs.getTabComponentAt (i); BufferedImage av = ed.getDisplayAvatar (); if (av != null) { BufferedImage nim = UIUtils.getScaledImage (av, 28); if (nim.getHeight () > 28) { nim = UIUtils.getScaledImage (av, 28, 28); } th.setIcon (new ImageIcon (nim)); th.setText (null); } else { th.setIcon (null); th.setText (ed.getShortName ()); } String tt = ed.getShortName (); if (ed.getOnlineStatus () != null) { tt += ", " + ed.getOnlineStatus ().getName (); } th.setToolTipText (String.format ("<html>%s</html>", tt)); } } } } public boolean canClose () { return true; } @Override public void onShow () { // TODO } @Override public void onHide () { } @Override public void onClose () { EditorsEnvironment.removeEditorChangedListener (this); EditorsEnvironment.removeUserOnlineStatusListener (this); } public boolean removeOnClose () { return false; } public String getIconType () { return Constants.EDITORS_ICON_NAME; } public String getTitle () { return "{Contacts}"; // {Editors} } public void panelShown (MainPanelEvent ev) { } private void update () { if (!this.isVisible ()) { return; } } private JMenuItem createStatusMenuItem 
(final EditorEditor.OnlineStatus status) { final EditorsSideBar _this = this; JMenuItem mi = UIUtils.createMenuItem (status.getName (), this.getStatusIconName (status), new ActionListener () { public void actionPerformed (ActionEvent ev) { try { EditorsEnvironment.setUserOnlineStatus (status); } catch (Exception e) { Environment.logError ("Unable to set user status to: " + status, e); UIUtils.showErrorMessage (_this.projectViewer, "Unable to change your status, please contact Quoll Writer support for assistance."); } } }); return mi; } private void updateUserOnlineStatus (EditorEditor.OnlineStatus status) { if (status == null) { status = EditorEditor.OnlineStatus.offline; } String iconName = this.getStatusIconName (status); String toolTip = null; String info = null; if (status == EditorEditor.OnlineStatus.offline) { toolTip = "Click to go online"; info = "You have been logged out."; } else { toolTip = status.getName () + ", click to change your status"; info = String.format ("Your status is now <b>%s</b>.", status.getName ()); } this.statusButton.setIcon (Environment.getIcon (iconName, Constants.ICON_SIDEBAR)); this.statusButton.setToolTipText (toolTip); if (this.statusButton.isShowing ()) { this.showNotification (iconName, info, 2, this.statusButton); } } public void userOnlineStatusChanged (UserOnlineStatusEvent ev) { this.updateUserOnlineStatus (ev.getStatus ()); } @Override public List<JComponent> getHeaderControls () { final EditorsSideBar _this = this; List<JComponent> buts = new ArrayList (); this.statusButton = UIUtils.createButton (this.getStatusIconName (EditorEditor.OnlineStatus.offline), Constants.ICON_SIDEBAR, "Click to go online", null); this.userOnlineStatusChanged (new UserOnlineStatusEvent (EditorsEnvironment.getUserOnlineStatus ())); this.statusButton.addActionListener (new ActionListener () { public void actionPerformed (ActionEvent ev) { if (EditorsEnvironment.isUserLoggedIn ()) { JPopupMenu m = new JPopupMenu (); Set<EditorEditor.OnlineStatus> 
statuses = new LinkedHashSet (); statuses.add (EditorEditor.OnlineStatus.online); statuses.add (EditorEditor.OnlineStatus.busy); statuses.add (EditorEditor.OnlineStatus.away); statuses.add (EditorEditor.OnlineStatus.snooze); for (EditorEditor.OnlineStatus v : statuses) { m.add (_this.createStatusMenuItem (v)); } JMenuItem mi = UIUtils.createMenuItem ("Logout", _this.getStatusIconName (null), new ActionListener () { public void actionPerformed (ActionEvent ev) { EditorsEnvironment.goOffline (); } }); m.add (mi); m.show (_this.statusButton, 10, 10); } else { QPopup np = null; if (EditorsEnvironment.hasLoginCredentials ()) { np = _this.showNotification (Constants.LOADING_GIF_NAME, "Logging in...", -1, _this.statusButton); } // TODO: Very nasty, fix. final QPopup fnp = np; EditorsEnvironment.goOnline (null, new ActionListener () { public void actionPerformed (ActionEvent ev) { if (fnp != null) { fnp.removeFromParent (); } _this.updateView (); } }, // On cancel null, // On error new ActionListener () { public void actionPerformed (ActionEvent ev) { _this.hideNotification (); EditorsUIUtils.showLoginError ("Unable to login, please check your email and password.", new ActionListener () { public void actionPerformed (ActionEvent ev) { EditorsEnvironment.goOnline (null, null, null, null); } }, null); } }); } } }); buts.add (this.statusButton); /* b = UIUtils.createButton (Constants.FIND_ICON_NAME, Constants.ICON_SIDEBAR, "Click to find editors", new ActionAdapter () { public void actionPerformed (ActionEvent ev) { _this.toggleFindEditorsTab (); } }); buts.add (b); */ JButton b = UIUtils.createButton (Constants.NEW_ICON_NAME, Constants.ICON_SIDEBAR, "Click to send an invite", new ActionAdapter () { public void actionPerformed (ActionEvent ev) { EditorsUIUtils.showInviteEditor (_this.projectViewer); } }); buts.add (b); b = UIUtils.createButton (Constants.OPTIONS_ICON_NAME, Constants.ICON_SIDEBAR, "Click to view the config options", new ActionAdapter () { public void 
actionPerformed (ActionEvent ev) { JMenuItem mi = null; JPopupMenu popup = new JPopupMenu (); // Get all previous editors. int prevCount = 0; for (EditorEditor ed : EditorsEnvironment.getEditors ()) { if (ed.isPrevious ()) { prevCount++; } } if (prevCount > 0) { popup.add (UIUtils.createMenuItem (String.format ("View the previous {contacts} (%s)", Environment.formatNumber (prevCount)), Constants.STOP_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { _this.showPreviousEditors (); } })); } popup.add (UIUtils.createMenuItem ("Update your name/avatar", Constants.EDIT_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { EditorsUIUtils.updateYourInfo (_this.projectViewer); } })); popup.add (UIUtils.createMenuItem ("Change your password", Constants.EDIT_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { EditorsUIUtils.showChangePassword (_this.projectViewer); } })); if (!EditorsEnvironment.getEditorsPropertyAsBoolean (Constants.QW_EDITORS_SERVICE_HAS_LOGGED_IN_PROPERTY_NAME)) { popup.add (UIUtils.createMenuItem ("Resend confirmation email", Constants.SEND_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { UIUtils.openURL (_this.projectViewer, String.format (Environment.getQuollWriterWebsiteLink ("editor-mode/send-account-confirmation-email?email=%s", EditorsEnvironment.getUserAccount ().getEmail ()))); } })); } else { // If they have their password stored then display it. 
final String edPass = EditorsEnvironment.getEditorsProperty (Constants.QW_EDITORS_SERVICE_PASSWORD_PROPERTY_NAME); if (edPass != null) { popup.add (UIUtils.createMenuItem ("Display your password", Constants.VIEW_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { String extra = ""; if (Environment.isDebugModeEnabled ()) { extra = String.format ("<br />Messaging Username: <b>%s</b>", EditorsEnvironment.getUserAccount ().getMessagingUsername ()); } UIUtils.showMessage ((PopupsSupported) _this.projectViewer, "Your Editors service password", String.format ("Note: your password is being displayed because you have checked the <i>Save password</i> box for logging into the Editors service.<br /><br />Your login details are:<br /><br />Email address: <b>%s</b><br />Password: <b>%s</b>%s", EditorsEnvironment.getUserAccount ().getEmail (), edPass, extra)); } })); } else { popup.add (UIUtils.createMenuItem ("Reset your password", Constants.CANCEL_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { UIUtils.openURL (_this.projectViewer, String.format (Environment.getQuollWriterWebsiteLink ("editor-mode/send-password-reset-email?email=%s", null), EditorsEnvironment.getUserAccount ().getEmail ())); } })); } } popup.add (UIUtils.createMenuItem ("Preferences", Constants.CONFIG_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { AbstractProjectViewer viewer = Environment.getFocusedProjectViewer (); viewer.showOptions ("editors"); } })); popup.add (UIUtils.createMenuItem ("Help", Constants.HELP_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { UIUtils.openURL (_this, Environment.getQuollWriterHelpLink ("editor-mode/sidebar", null)); } })); popup.add (UIUtils.createMenuItem ("Delete your account", Constants.DELETE_ICON_NAME, new ActionListener () { public void actionPerformed (ActionEvent ev) { EditorsUIUtils.showDeleteAccount (_this.projectViewer); } })); 
JComponent s = (JComponent) ev.getSource (); popup.show (s, s.getWidth () / 2, s.getHeight ()); } }); buts.add (b); return buts; } private void createWelcomeTab () { try { this.projectViewer.addHelpTextTab ("Welcome to the Editors Service", StringUtils.replaceString (Environment.getResourceFileAsString (Constants.EDITORS_SIDEBAR_FIRST_USE_HELP_FILE), "\n", ""), Constants.EDITORS_ICON_NAME, "editors-service-first-help"); } catch (Exception e) { Environment.logError ("Unable to get editors sidebar first use help file", e); return; } } private void createEditorList () { final EditorsSideBar _this = this; // Get all our editors. List<EditorEditor> editors = EditorsEnvironment.getEditors (); Box edBox = new Box (BoxLayout.Y_AXIS); edBox.setOpaque (true); edBox.setBackground (UIUtils.getComponentColor ()); edBox.setBorder (UIUtils.createPadding (0, 0, 0, 0)); this.firstLogin = new Box (BoxLayout.Y_AXIS); this.firstLogin.add (UIUtils.createBoldSubHeader ("<i>Checked your email?</i>", null)); JComponent firstLoginHelp = UIUtils.createHelpTextPane ("Once you've validated your email address click on the button below to login.", this.projectViewer); firstLoginHelp.setBorder (null); this.firstLogin.setBorder (new EmptyBorder (5, 5, 5, 5)); Box bfirstLoginHelp = new Box (BoxLayout.Y_AXIS); bfirstLoginHelp.setAlignmentX (Component.LEFT_ALIGNMENT); bfirstLoginHelp.setBorder (UIUtils.createPadding (0, 5, 0, 5)); bfirstLoginHelp.add (firstLoginHelp); this.firstLogin.add (bfirstLoginHelp); this.firstLogin.add (Box.createVerticalStrut (10)); JButton but = UIUtils.createToolBarButton (this.getStatusIconName (null), "Click to go online", null, new ActionListener () { public void actionPerformed (ActionEvent ev) { EditorsEnvironment.goOnline (null, new ActionListener () { public void actionPerformed (ActionEvent ev) { _this.updateView (); } }, // On cancel null, // On error null); } }); but.setFont (but.getFont ().deriveFont (UIUtils.getScaledFontSize (16))); but.setText ("Click to 
Login");

// Button bar for the first-login panel, centered with glue on both sides.
List<JButton> buts = new ArrayList ();
buts.add (but);

Box bbar = new Box (BoxLayout.X_AXIS);
bbar.setAlignmentX (Component.LEFT_ALIGNMENT);
bbar.add (Box.createHorizontalGlue ());
bbar.add (UIUtils.createButtonBar (buts));
bbar.add (Box.createHorizontalGlue ());

this.firstLogin.add (bbar);
edBox.add (this.firstLogin);

// Panel shown when the user has no current contacts.  The {x} tokens are
// placeholders — presumably substituted by the UIUtils/Environment text
// helpers; confirm.
this.noEditors = new Box (BoxLayout.Y_AXIS);
this.noEditors.add (UIUtils.createBoldSubHeader ("<i>No current {contacts}</i>", null));

JComponent noedsHelp = UIUtils.createHelpTextPane ("You currently have no {contacts}. Click on the button below to invite someone to be {an editor} for your {project}.", this.projectViewer);
noedsHelp.setBorder (null);

this.noEditors.setBorder (new EmptyBorder (5, 5, 5, 5));

Box bnoedsHelp = new Box (BoxLayout.Y_AXIS);
bnoedsHelp.setAlignmentX (Component.LEFT_ALIGNMENT);
bnoedsHelp.setBorder (UIUtils.createPadding (0, 5, 0, 5));
bnoedsHelp.add (noedsHelp);

this.noEditors.add (bnoedsHelp);
this.noEditors.add (Box.createVerticalStrut (10));

// Button that opens the "invite an editor" UI.
but = UIUtils.createToolBarButton (Constants.NEW_ICON_NAME,
                                   "Click to invite someone to be {an editor} for your {project}",
                                   null,
                                   new ActionListener () {
    public void actionPerformed (ActionEvent ev) {
        EditorsUIUtils.showInviteEditor (_this.projectViewer);
    }
});

but.setFont (but.getFont ().deriveFont ((float) 16));
but.setText ("Send an invite");

buts = new ArrayList ();
buts.add (but);

bbar = new Box (BoxLayout.X_AXIS);
bbar.setAlignmentX (Component.LEFT_ALIGNMENT);
bbar.add (Box.createHorizontalGlue ());
bbar.add (UIUtils.createButtonBar (buts));
bbar.add (Box.createHorizontalGlue ());

this.noEditors.add (bbar);
edBox.add (this.noEditors);

// Accordion sections for the invite/contact groupings; each is refreshed
// by updateView().
this.invitesForMe = new EditorsSection ("Invites from others",
                                        "Invites I've received from other people.",
                                        null,
                                        this.projectViewer);
edBox.add (this.invitesForMe);

this.invitesIveSent = new EditorsSection ("Pending invites",
                                          "Invites I've sent to other people.",
                                          null,
                                          this.projectViewer);
edBox.add (this.invitesIveSent);

this.otherEditors = new EditorsSection ("All {Contacts}", //Editors
                                        null,
                                        null,
                                        this.projectViewer);
edBox.add (this.otherEditors);

final JScrollPane sp = UIUtils.createScrollPane (edBox);
sp.setBorder (null);

// The contact list always lives at tab index 0.
this.tabs.add (sp, 0);
this.tabs.setIconAt (0, Environment.getIcon ("editors-list", Constants.ICON_EDITORS_LIST_TAB_HEADER));

this.updateView ();

// Reset the scrollbars once layout has happened.
UIUtils.doLater (new ActionListener () {
    public void actionPerformed (ActionEvent ev) {
        sp.getHorizontalScrollBar ().setValue (0);
        sp.getVerticalScrollBar ().setValue (0);
    }
});

}

/**
 * Rebuild the visibility/contents of each sidebar section from the current
 * set of editors.  Previous/rejected editors are skipped entirely; pending
 * editors are split into invites-from-others vs invites-I've-sent; everyone
 * else lands in the "all contacts" section.  A section that fails to update
 * is logged and hidden rather than breaking the whole sidebar.
 */
private void updateView ()
{

    Set<EditorEditor> invitesForMe = new LinkedHashSet ();
    // projEds is currently unused (the per-project grouping was disabled).
    Set<ProjectEditor> projEds = new LinkedHashSet ();
    Set<EditorEditor> others = new LinkedHashSet ();
    Set<EditorEditor> invitesIveSent = new LinkedHashSet ();

    int edsSize = 0;

    for (EditorEditor ed : EditorsEnvironment.getEditors ())
    {

        if (ed.isPrevious ())
        {
            continue;
        }

        if (ed.isRejected ())
        {
            continue;
        }

        edsSize++;

        if (ed.isPending ())
        {

            if (!ed.isInvitedByMe ())
            {
                invitesForMe.add (ed);
            } else {
                invitesIveSent.add (ed);
            }

        } else {

            others.add (ed);

        }

    }

    // Each section is hidden when empty.
    this.otherEditors.setVisible (others.size () > 0);

    try
    {
        this.otherEditors.update (others);
    } catch (Exception e) {

        Environment.logError ("Unable to update other editors section with editors: " + others, e);
        this.otherEditors.setVisible (false);
        UIUtils.showErrorMessage (this.projectViewer,
                                  "Unable to display others section, please contact Quoll Writer support for assistance.");

    }

    this.invitesForMe.setVisible (invitesForMe.size () > 0);

    try
    {
        this.invitesForMe.update (invitesForMe);
    } catch (Exception e) {

        Environment.logError ("Unable to update invites for me editors section with editors: " + invitesForMe, e);
        this.invitesForMe.setVisible (false);
        UIUtils.showErrorMessage (this.projectViewer,
                                  "Unable to display invites from others section, please contact Quoll Writer support for assistance.");

    }

    this.invitesIveSent.setVisible (invitesIveSent.size () > 0);

    try
    {
        this.invitesIveSent.update (invitesIveSent);
    } catch (Exception e) {

        Environment.logError ("Unable to update invites ive sent section with editors: " + invitesIveSent, e);
        this.invitesIveSent.setVisible (false);
        UIUtils.showErrorMessage (this.projectViewer,
                                  "Unable to display invites I've sent section, please contact Quoll Writer support for assistance.");

    }

    this.noEditors.setVisible (edsSize == 0);
    this.firstLogin.setVisible (false);

    // A null last-login means the user has never logged in to the service:
    // show the first-login panel instead of the "no contacts" panel.
    if (EditorsEnvironment.getUserAccount ().getLastLogin () == null)
    {
        this.firstLogin.setVisible (true);
        this.noEditors.setVisible (false);
    }

    this.validate ();
    this.repaint ();

}

/**
 * Hide the notification strip, if one has been created.
 */
private void hideNotification ()
{

    if (this.notification != null)
    {
        this.notification.setVisible (false);
    }

}

/**
 * Show a popup notification anchored at a component.
 *
 * @param iconType Icon name for the popup header.
 * @param message  Header/message text.
 * @param duration Auto-dismiss delay in seconds; a value <= 0 leaves the
 *                 popup up until removed by the caller.
 * @param showAt   Component to anchor the popup at.
 * @return The popup, so the caller can remove it early.
 */
private QPopup showNotification (String iconType,
                                 String message,
                                 int duration,
                                 Component showAt)
{

    final EditorsSideBar _this = this;

    final QPopup p = new QPopup (message,
                                 Environment.getIcon (iconType, Constants.ICON_EDITOR_MESSAGE),
                                 null);

    p.getHeader ().setFont (p.getHeader ().getFont ().deriveFont ((float) 14));
    p.getHeader ().setBorder (UIUtils.createPadding (10, 10, 10, 10));

    this.projectViewer.showPopupAt (p, showAt, true);

    if (duration > 0)
    {

        // duration is in seconds; the Swing Timer wants millis.
        javax.swing.Timer timer = new javax.swing.Timer (duration * 1000, new ActionAdapter () {
            public void actionPerformed (ActionEvent ev) {
                p.removeFromParent ();
            }
        });

        timer.setRepeats (false);
        timer.start ();

    }

    return p;

}

/**
 * Build the sidebar content: a (initially hidden) notification strip on top
 * of the tab pane that holds the contact list and per-editor tabs.
 */
public JComponent getContent ()
{

    final EditorsSideBar _this = this;

    Box box = new Box (BoxLayout.Y_AXIS);
    box.setAlignmentX (Component.LEFT_ALIGNMENT);

    // Turn off the auto border.
box.setBorder (UIUtils.createPadding (0, 0, 0, 0));
    box.setOpaque (true);
    box.setBackground (UIUtils.getComponentColor ());

    // Notification strip; hidden until showNotification is called.
    this.notification = new Box (BoxLayout.Y_AXIS);
    this.notification.setAlignmentX (Component.LEFT_ALIGNMENT);
    this.notification.setVisible (false);
    this.notification.setBorder (UIUtils.createPadding (5, 5, 5, 5));
    box.add (this.notification);

    this.tabs = new DnDTabbedPane ();
    this.tabs.putClientProperty(com.jgoodies.looks.Options.NO_CONTENT_BORDER_KEY, Boolean.TRUE);
    this.tabs.setAlignmentX (Component.LEFT_ALIGNMENT);
    this.tabs.setTabLayoutPolicy (JTabbedPane.SCROLL_TAB_LAYOUT);
    this.tabs.setBorder (UIUtils.createPadding (5, 2, 0, 0));
    this.tabs.addChangeListener (new ChangeAdapter () {
        public void stateChanged (ChangeEvent ev) {
        }
    });

    box.add (this.tabs);

    this.createEditorList ();

    // See if this is the first time the user has seen the side bar.
    if (!EditorsEnvironment.getEditorsPropertyAsBoolean (Constants.QW_EDITORS_SERVICE_EDITORS_SIDEBAR_SEEN_PROPERTY_NAME))
    {

        try
        {
            EditorsEnvironment.setEditorsProperty (Constants.QW_EDITORS_SERVICE_EDITORS_SIDEBAR_SEEN_PROPERTY_NAME, true);
        } catch (Exception e) {
            // Best effort: failing to record the flag just means the welcome
            // tab may show again next time.
            Environment.logError ("Unable to set editors sidebar seen property", e);
        }

        this.createWelcomeTab ();

    }

    return box;

}

/**
 * Wire up a tab header so that left-click selects the tab, middle-click
 * closes it and the context menu offers "Close".
 *
 * @param header  The tab header component.
 * @param content The tab content the header controls.
 */
private void addTabCloseListener (final JComponent header,
                                  final JComponent content)
{

    final EditorsSideBar _this = this;

    header.addMouseListener (new MouseEventHandler () {

        @Override
        public void handlePress (MouseEvent ev) {
            _this.tabs.setSelectedComponent (content);
        }

        @Override
        public void handleMiddlePress (MouseEvent ev) {
            _this.tabs.remove (content);
        }

        @Override
        public void fillPopup (JPopupMenu menu, MouseEvent ev) {
            menu.add (UIUtils.createMenuItem ("Close",
                                              Constants.CLOSE_ICON_NAME,
                                              new ActionListener () {
                public void actionPerformed (ActionEvent ev) {
                    _this.tabs.remove (content);
                }
            }));
        }

    });

}

/**
 * Open (or re-open) the "previous contacts" tab at index 1, listing editors
 * flagged as previous.  Does nothing if there are no previous editors.
 */
public void showPreviousEditors ()
{

    JComponent c = this.specialTabs.get ("previous-editors");

    if (c != null)
    {
        this.tabs.remove (c);
    }

    final Box edBox = new ScrollableBox (BoxLayout.Y_AXIS);
    edBox.setAlignmentX (Component.LEFT_ALIGNMENT);
    edBox.setBorder (UIUtils.createPadding (0, 10, 0, 5));
    edBox.setOpaque (true);
    edBox.setBackground (UIUtils.getComponentColor ());

    JComponent nc = UIUtils.createInformationLabel ("{Contacts} you have removed or have removed you in the past.");
    nc.setBorder (UIUtils.createPadding (0, 0, 5, 0));
    edBox.add (nc);

    int prevCount = 0;

    List<EditorEditor> eds = EditorsEnvironment.getEditors ();

    try
    {

        for (int i = 0; i < eds.size (); i++)
        {

            EditorEditor ed = eds.get (i);

            if (!ed.isPrevious ())
            {
                continue;
            }

            prevCount++;

            EditorInfoBox infBox = this.getEditorBox (ed);

            if (i < eds.size () - 1)
            {
                infBox.setBorder (UIUtils.createBottomLineWithPadding (5, 0, 5, 0));
            } else {
                infBox.setBorder (UIUtils.createPadding (5, 0, 5, 0));
            }

            edBox.add (infBox);

            // NOTE(review): this extra increment combines with the loop's own
            // i++ so every other editor is skipped — looks like a bug; confirm.
            i++;

        }

    } catch (Exception e) {

        Environment.logError ("Unable to show editors: " + eds, e);

        UIUtils.showErrorMessage (this.projectViewer,
                                  "Unable to build list of previous {contacts}, please contact Quoll Writer support for assistance.");

        return;

    }

    if (prevCount == 0)
    {
        return;
    }

    edBox.add (Box.createVerticalGlue ());

    AccordionItem prev = new AccordionItem (String.format ("Previous {contacts} (%s)", Environment.formatNumber (prevCount)),
                                            null,
                                            edBox);
    prev.setBorder (UIUtils.createPadding (0, 0, 0, 0));
    prev.init ();

    Box wrapper = new ScrollableBox (BoxLayout.Y_AXIS);
    wrapper.setAlignmentX (Component.LEFT_ALIGNMENT);
    wrapper.add (prev);

    JScrollPane sp = UIUtils.createScrollPane (wrapper);
    sp.setBorder (null);

    this.tabs.add (sp, 1);

    JLabel l = new JLabel (Environment.getIcon (Constants.STOP_ICON_NAME, Constants.ICON_EDITORS_LIST_TAB_HEADER));
    l.setToolTipText (Environment.replaceObjectNames ("Previous {contacts}"));

    this.tabs.setTabComponentAt (1, l);

    this.addTabCloseListener (l, sp);

    this.tabs.setSelectedComponent (sp);
    this.tabs.revalidate ();
    this.tabs.repaint ();

    // NOTE(review): the tab component added above is sp, but prev is what is
    // stored here; the removal at the top of this method passes the stored
    // value to tabs.remove, so the stale tab may never actually be removed.
    // Confirm whether this should store sp (as showMessagesInSpecialTab does).
    this.specialTabs.put ("previous-editors", prev);

}

/**
 * Show a set of messages in a closable special tab at index 1, grouped by
 * the editor they came from.
 *
 * @param messages    The messages to display.
 * @param iconName    Icon for the tab header.
 * @param toolTipText Tooltip for the tab header (object names get replaced).
 * @param tabName     Key under which the tab is remembered in specialTabs.
 * @param desc        Optional description shown at the top of the tab.
 * @param onDescClose Currently unused here — TODO confirm intended use.
 * @throws GeneralException If an editor panel cannot be built.
 */
private void showMessagesInSpecialTab (Set<EditorMessage> messages,
                                       String iconName,
                                       String toolTipText,
                                       String tabName,
                                       String desc,
                                       ActionListener onDescClose)
                                throws GeneralException
{

    final EditorsSideBar _this = this;

    // Split the messages into Editor->Messages
    Map<EditorEditor, Set<EditorMessage>> edmessages = new LinkedHashMap ();

    for (EditorMessage m : messages)
    {

        EditorEditor ed = m.getEditor ();

        Set<EditorMessage> edm = edmessages.get (ed);

        if (edm == null)
        {
            edm = new LinkedHashSet ();
            edmessages.put (ed, edm);
        }

        edm.add (m);

    }

    final Box content = new ScrollableBox (BoxLayout.Y_AXIS);

    JScrollPane sp = UIUtils.createScrollPane (content);
    sp.setBorder (null);

    this.tabs.add (sp, 1);

    JLabel l = new JLabel (Environment.getIcon (iconName, Constants.ICON_EDITORS_LIST_TAB_HEADER));
    l.setToolTipText (Environment.replaceObjectNames (toolTipText));

    this.tabs.setTabComponentAt (1, l);

    this.addTabCloseListener (l, sp);

    if (desc != null)
    {
        JComponent nc = UIUtils.createInformationLabel (desc);
        nc.setBorder (UIUtils.createPadding (5, 5, 5, 5));
        content.add (nc);
    }

    // One header plus chat-less editor panel per editor.
    for (EditorEditor ed : edmessages.keySet ())
    {

        Header h = UIUtils.createBoldSubHeader (String.format ("%s messages from", Environment.formatNumber (edmessages.get (ed).size ())),
                                                null);
        h.setBorder (UIUtils.createPadding (0, 5, 0, 0));
        content.add (h);

        EditorPanel ep = new EditorPanel (this, ed, edmessages.get (ed));
        ep.setShowChatBox (false);
        ep.init ();
        ep.setOpaque (true);
        ep.setBackground (UIUtils.getComponentColor ());
        ep.setAlignmentX (Component.LEFT_ALIGNMENT);
        ep.setBorder (UIUtils.createPadding (5, 5, 5, 0));
        content.add (ep);

    }

    this.tabs.setSelectedComponent (sp);
    this.tabs.revalidate ();
    this.tabs.repaint ();

    this.specialTabs.put (tabName, sp);

}

/**
 * Toggle the "find editors" tab: hide it if currently shown, otherwise show it.
 */
public void toggleFindEditorsTab ()
{

    if ((this.editorFindPanel != null)
        &&
        (this.editorFindPanel.getParent () != null)
       )
    {

        this.hideFindEditorsTab ();
return;

    }

    this.showFindEditorsTab ();

}

/**
 * Remove the "find editors" tab if it exists.
 */
public void hideFindEditorsTab ()
{

    if (this.editorFindPanel == null)
    {
        return;
    }

    this.tabs.remove (this.editorFindPanel);

}

/**
 * Show (creating if needed) the "find editors" tab at index 1 and select it.
 * Calls itself once after inserting the tab so the select/repaint branch runs.
 */
public void showFindEditorsTab ()
{

    // Already showing: just select it.
    if ((this.editorFindPanel != null)
        &&
        (this.editorFindPanel.getParent () != null)
       )
    {

        this.tabs.setSelectedComponent (this.editorFindPanel);
        this.tabs.revalidate ();
        this.tabs.repaint ();

        return;

    }

    if (this.editorFindPanel == null)
    {

        // Add the editor find panel.
        this.editorFindPanel = new EditorFindPanel (this);
        this.editorFindPanel.init ();
        this.editorFindPanel.setOpaque (true);
        this.editorFindPanel.setBackground (UIUtils.getComponentColor ());
        this.editorFindPanel.setAlignmentX (Component.LEFT_ALIGNMENT);
        this.editorFindPanel.setBorder (new EmptyBorder (5, 5, 5, 5));

    }

    JLabel add = new JLabel (Environment.getIcon (Constants.FIND_ICON_NAME, Constants.ICON_TAB_HEADER));

    this.tabs.add (this.editorFindPanel, 1);
    this.tabs.setTabComponentAt (1, add);

    // Re-enter: the panel now has a parent so the branch above selects it.
    this.showFindEditorsTab ();

}

/**
 * Find the open tab panel for an editor, if any.
 *
 * @param ed The editor to look for (compared by identity).
 * @return The panel, or null if the editor has no open tab.
 */
public EditorPanel getEditorPanel (EditorEditor ed)
{

    for (int i = 0; i < this.tabs.getTabCount (); i++)
    {

        Component comp = this.tabs.getComponentAt (i);

        if (comp instanceof EditorPanel)
        {

            if (ed == ((EditorPanel) comp).getEditor ())
            {
                return (EditorPanel) comp;
            }

        }

    }

    return null;

}

/**
 * Open the editor's tab and then, once the UI has settled, show its chat box.
 *
 * @throws GeneralException If the editor tab cannot be shown.
 */
public void showChatBox (final EditorEditor ed)
                  throws GeneralException
{

    this.showEditor (ed);

    final EditorsSideBar _this = this;

    UIUtils.doLater (new ActionListener () {

        @Override
        public void actionPerformed (ActionEvent ev) {

            EditorPanel edPanel = _this.getEditorPanel (ed);

            if (edPanel != null)
            {
                edPanel.showChatBox ();
            }

        }

    });

}

/**
 * Show the tab for an editor, creating the panel (and loading the editor's
 * messages first) if it isn't open yet.  Calls itself once after creating the
 * panel so the select/repaint branch runs.
 *
 * @throws GeneralException If the editor's messages cannot be loaded.
 */
public void showEditor (EditorEditor ed)
                 throws GeneralException
{

    final EditorsSideBar _this = this;

    EditorPanel edPanel = this.getEditorPanel (ed);

    if (edPanel != null)
    {

        this.editorChanged (new EditorChangedEvent (ed, EditorChangedEvent.EDITOR_CHANGED));

        this.tabs.setSelectedComponent (edPanel);
        this.tabs.revalidate ();
        this.tabs.repaint ();

        return;

    }

    if (!ed.messagesLoaded ())
    {

        try
        {
            EditorsEnvironment.loadMessagesForEditor (ed);
        } catch (Exception e) {
            throw new GeneralException ("Unable to load messages for editor: " + ed, e);
        }

    }

    EditorPanel ep = new EditorPanel (this, ed);
    ep.init ();
    ep.setOpaque (true);
    ep.setBackground (UIUtils.getComponentColor ());
    ep.setAlignmentX (Component.LEFT_ALIGNMENT);
    ep.setBorder (UIUtils.createPadding (5, 5, 5, 0));

    int ind = this.tabs.getTabCount ();

    this.tabs.add (ep);

    final JLabel th = new JLabel ();
    th.setBorder (new CompoundBorder (UIUtils.createPadding (2, 0, 0, 0),
                                      UIUtils.createLineBorder ()));
    th.setMaximumSize (new Dimension (100, 100));

    this.tabs.setTabComponentAt (ind, th);

    this.addTabHeaderMouseHandler (th, ep);

    // Re-enter: the panel now exists so the branch above selects it.
    this.showEditor (ed);

}

public void init ()
            throws GeneralException
{

    super.init ();

}

/**
 * Same press/middle-press/popup wiring as addTabCloseListener, used for
 * editor tabs.
 */
private void addTabHeaderMouseHandler (final JComponent tabHeader,
                                       final JComponent tab)
{

    final EditorsSideBar _this = this;

    tabHeader.addMouseListener (new MouseEventHandler () {

        @Override
        public void handlePress (MouseEvent ev) {
            _this.tabs.setSelectedComponent (tab);
        }

        @Override
        public void handleMiddlePress (MouseEvent ev) {
            _this.tabs.remove (tab);
        }

        @Override
        public void fillPopup (JPopupMenu menu, MouseEvent ev) {
            menu.add (UIUtils.createMenuItem ("Close",
                                              Constants.CLOSE_ICON_NAME,
                                              new ActionListener () {
                public void actionPerformed (ActionEvent ev) {
                    _this.tabs.remove (tab);
                }
            }));
        }

    });

}

/**
 * Map an online status to its icon name; a null status is shown as offline.
 */
public String getStatusIconName (EditorEditor.OnlineStatus status)
{

    String type = "offline";

    if (status != null)
    {
        type = status.getType ();
    }

    return Constants.ONLINE_STATUS_ICON_NAME_PREFIX + type;

}

/**
 * True if the editor is a pending invite sent to us (not one we sent).
 */
private boolean isPendingInviteForMe (EditorEditor ed)
{

    return ed.isPending () && !ed.isInvitedByMe ();

}

/**
 * Always a fixed 250 (screen-scaled) x 200 minimum size.
 */
@Override
public Dimension getMinimumSize ()
{

    return new Dimension (UIUtils.getScreenScaledWidth (250),
                          200);

}

/**
 * Build a vertical list of find-result boxes, one per editor, with a
 * separator line under every entry except the last.
 */
public JComponent createEditorsFindList (List<EditorEditor> editors)
{

    Box b = new Box (BoxLayout.Y_AXIS);
    b.setAlignmentX (Component.LEFT_ALIGNMENT);

    for (int i = 0; i < editors.size (); i++)
    {

        EditorEditor ed = editors.get (i);

        EditorFindInfoBox infBox = this.getEditorFindBox (ed);

        if (i < editors.size () - 1)
        {
            infBox.setBorder (new CompoundBorder (new MatteBorder (0, 0, 1, 0, UIUtils.getBorderColor ()),
                                                  new EmptyBorder (5, 0, 5, 0)));
        } else {
            infBox.setBorder (new EmptyBorder (5, 0, 5, 0));
        }

        // Cap the height so BoxLayout doesn't stretch the entries.
        infBox.setMaximumSize (new Dimension (Short.MAX_VALUE,
                                              infBox.getPreferredSize ().height + 10));

        b.add (infBox);

    }

    b.setBorder (new EmptyBorder (0, 10, 0, 5));

    return b;

}

/**
 * Create an initialized info box (with the full context menu) for an editor.
 *
 * @throws GeneralException If the box cannot be initialized.
 */
private EditorInfoBox getEditorBox (final EditorEditor ed)
                             throws GeneralException
{

    EditorInfoBox b = new EditorInfoBox (ed,
                                         this.projectViewer,
                                         false);
    b.setAlignmentX (Component.LEFT_ALIGNMENT);
    b.addFullPopupListener ();
    b.init ();

    return b;

}

/**
 * Create a clickable find-result box for an editor; clicking opens that
 * editor's chat box.
 */
private EditorFindInfoBox getEditorFindBox (final EditorEditor ed)
{

    EditorFindInfoBox b = new EditorFindInfoBox (ed);
    b.setAlignmentX (Component.LEFT_ALIGNMENT);

    final EditorsSideBar _this = this;

    b.addMouseListener (new MouseEventHandler () {

        @Override
        public void handlePress (MouseEvent ev) {

            // Show the editor.
try
            {
                _this.showChatBox (ed);
            } catch (Exception e) {

                UIUtils.showErrorMessage (_this.projectViewer,
                                          "Unable to show {editor}.");

                Environment.logError ("Unable to show editor: " + ed, e);

            }

        }

    });

    UIUtils.setAsButton (b);

    return b;

}

/**
 * One accordion section of the contact list ("invites from others",
 * "pending invites", "all contacts").  The section title carries a live
 * count and each entry is an EditorInfoBox; a bottom separator is drawn
 * under every entry except the last.
 */
private class EditorsSection extends AccordionItem
{

    // Vertical box holding the EditorInfoBox entries.
    private Box editorsListWrapper = null;
    private AbstractProjectViewer viewer = null;
    // Base title; the displayed title appends the current count.
    private String title = null;
    // Optional help text shown only while the section has entries.
    private JLabel help = null;
    // Optional help text shown only while the section is empty.
    private JLabel noEditorsHelp = null;
    // Re-applies an entry's border when the entry is resized.
    private ComponentListener listener = null;

    public EditorsSection (String title,
                           String help,
                           String noEditorsHelp,
                           AbstractProjectViewer viewer)
    {

        super ("",
               null);

        this.title = title;
        this.viewer = viewer;

        final EditorsSection _this = this;

        this.listener = new ComponentAdapter () {

            @Override
            public void componentResized (ComponentEvent ev) {

                int count = _this.editorsListWrapper.getComponentCount ();

                for (int i = 0; i < count; i++)
                {

                    EditorInfoBox infBox = (EditorInfoBox) _this.editorsListWrapper.getComponent (i);

                    if (infBox == ev.getSource ())
                    {
                        _this.setBorder (infBox, i == (count - 1));
                    }

                }

            }

        };

        Box content = new Box (BoxLayout.Y_AXIS);
        content.setAlignmentX (Component.LEFT_ALIGNMENT);
        content.setBorder (UIUtils.createPadding (0, 10, 0, 5));

        if (help != null)
        {
            this.help = UIUtils.createInformationLabel (help);
            this.help.setBorder (UIUtils.createPadding (0, 0, 5, 5));
            content.add (this.help);
        }

        if (noEditorsHelp != null)
        {
            this.noEditorsHelp = UIUtils.createInformationLabel (noEditorsHelp);
            this.noEditorsHelp.setBorder (UIUtils.createPadding (0, 0, 5, 5));
            content.add (this.noEditorsHelp);
        }

        this.editorsListWrapper = new Box (BoxLayout.Y_AXIS);
        content.add (this.editorsListWrapper);

        this.setContent (content);

    }

    /**
     * Build an entry box for an editor, registered with the resize listener.
     *
     * @throws GeneralException If the box cannot be initialized.
     */
    private EditorInfoBox getEditorBox (EditorEditor ed)
                                 throws GeneralException
    {

        EditorInfoBox b = new EditorInfoBox (ed,
                                             this.viewer,
                                             false);
        b.setAlignmentX (Component.LEFT_ALIGNMENT);
        b.addFullPopupListener ();
        b.init ();
        b.addComponentListener (this.listener);

        return b;

    }

    /**
     * Rebuild the section from the given editors: refresh the counted title,
     * toggle the help labels and recreate every entry box.
     *
     * @throws GeneralException If an entry box cannot be built.
     */
    public void update (Set<EditorEditor> eds)
                 throws GeneralException
    {

        this.setTitle (String.format ("%s (%s)",
                                      this.title,
                                      Environment.formatNumber (eds.size ())));

        if (this.help != null)
        {
            this.help.setVisible (eds.size () > 0);
        }

        if (this.noEditorsHelp != null)
        {
            this.noEditorsHelp.setVisible (eds.size () == 0);
        }

        this.editorsListWrapper.removeAll ();

        EditorInfoBox last = null;

        for (EditorEditor ed : eds)
        {

            EditorInfoBox infBox = this.getEditorBox (ed);

            last = infBox;

            this.editorsListWrapper.add (infBox);

            this.setBorder (infBox, false);

        }

        // Only the final entry gets the no-separator border.
        if (last != null)
        {
            this.setBorder (last, true);
        }

    }

    /**
     * Padding only for the last entry, padding plus a bottom separator line
     * for the rest.
     */
    private void setBorder (EditorInfoBox b,
                            boolean isLast)
    {

        b.setBorder (isLast ? UIUtils.createPadding (5, 0, 5, 0) : UIUtils.createBottomLineWithPadding (5, 0, 5, 0));

    }

    /**
     * Convenience overload: unwrap the editors from project editors and update.
     *
     * @throws GeneralException If an entry box cannot be built.
     */
    public void updateForProjectEditors (Set<ProjectEditor> pes)
                                  throws GeneralException
    {

        Set<EditorEditor> eds = new LinkedHashSet ();

        for (ProjectEditor pe : pes)
        {
            eds.add (pe.getEditor ());
        }

        this.update (eds);

    }

}

}
package com.swabunga.spell.engine;

import java.io.*;
import java.util.*;

/**
 * Another implementation of <code>SpellDictionary</code> that doesn't cache any words in memory.
 * Avoids the huge footprint of <code>SpellDictionaryHashMap</code> at the cost of relatively
 * minor latency.
 *
 * This implementation requires a special dictionary file, with "code*word" lines sorted by code.
 * It uses a dichotomy (binary search) algorithm to locate the lines whose code matches.
 *
 * @author Damien Guillaume
 * @version 0.1
 */
public class SpellDictionaryDichoDisk extends SpellDictionaryASpell {

    /** Random-access handle on the sorted "code*word" dictionary file. */
    private RandomAccessFile dictFile = null;

    /**
     * Dictionary and phonetic file encoding; null means lines are read with
     * {@link RandomAccessFile#readLine()} (ISO-8859-1 semantics).
     */
    private String encoding = null;

    /**
     * Dictionary convenience constructor.
     *
     * @param wordList the sorted "code*word" dictionary file
     */
    public SpellDictionaryDichoDisk(File wordList) throws FileNotFoundException, IOException {
        super((File) null);
        dictFile = new RandomAccessFile(wordList, "r");
    }

    /**
     * Dictionary convenience constructor.
     *
     * @param wordList the sorted "code*word" dictionary file
     * @param encoding character encoding of the dictionary file
     */
    public SpellDictionaryDichoDisk(File wordList, String encoding) throws FileNotFoundException, IOException {
        super((File) null);
        this.encoding = encoding;
        dictFile = new RandomAccessFile(wordList, "r");
    }

    /**
     * Dictionary constructor that uses an aspell phonetic file to
     * build the transformation table.
     *
     * @param wordList the sorted "code*word" dictionary file
     * @param phonetic the aspell phonetic rules file
     */
    public SpellDictionaryDichoDisk(File wordList, File phonetic) throws FileNotFoundException, IOException {
        super(phonetic);
        dictFile = new RandomAccessFile(wordList, "r");
    }

    /**
     * Dictionary constructor that uses an aspell phonetic file to
     * build the transformation table.
     *
     * @param wordList the sorted "code*word" dictionary file
     * @param phonetic the aspell phonetic rules file
     * @param encoding character encoding of both files
     */
    public SpellDictionaryDichoDisk(File wordList, File phonetic, String encoding) throws FileNotFoundException, IOException {
        super(phonetic, encoding);
        this.encoding = encoding;
        dictFile = new RandomAccessFile(wordList, "r");
    }

    /**
     * Add a word permanently to the dictionary (and the dictionary file).
     * <i>not implemented !</i>  Kept as a stderr warning rather than an
     * exception to preserve the original (non-failing) behavior.
     */
    public void addWord(String word) {
        System.err.println("error: addWord is not implemented for SpellDictionaryDichoDisk");
    }

    /**
     * Binary-search the dictionary file for the words whose code equals
     * {@code code}, within byte positions p1 - p2.
     *
     * @return the matching words, possibly empty
     * @throws IOException on read failure or malformed (no '*') lines
     */
    private LinkedList<String> dichoFind(String code, long p1, long p2) throws IOException {
        long pm = (p1 + p2) / 2;
        dictFile.seek(pm);
        // The seek usually lands mid-line: the first read discards the partial
        // line, the second reads the first complete line after pm.
        String l;
        if (encoding == null) {
            l = dictFile.readLine();
        } else {
            l = dictReadLine();
        }
        pm = dictFile.getFilePointer();
        if (encoding == null) {
            l = dictFile.readLine();
        } else {
            l = dictReadLine();
        }
        long pm2 = dictFile.getFilePointer();
        // Interval too small to bisect further: scan it sequentially.
        if (pm2 >= p2) {
            return seqFind(code, p1, p2);
        }
        int istar = l.indexOf('*');
        if (istar == -1) {
            throw new IOException("bad format: no * !");
        }
        String testcode = l.substring(0, istar);
        int comp = code.compareTo(testcode);
        if (comp < 0) {
            return dichoFind(code, p1, pm - 1);
        }
        if (comp > 0) {
            return dichoFind(code, pm2, p2);
        }
        // Exact hit: matching lines may extend to both sides of the midpoint.
        LinkedList<String> before = dichoFind(code, p1, pm - 1);
        LinkedList<String> after = dichoFind(code, pm2, p2);
        before.add(l.substring(istar + 1));
        before.addAll(after);
        return before;
    }

    /**
     * Sequentially scan byte positions p1 - p2 for lines whose code equals
     * {@code code}.
     *
     * @return the matching words, possibly empty
     * @throws IOException on read failure or malformed (no '*') lines
     */
    private LinkedList<String> seqFind(String code, long p1, long p2) throws IOException {
        LinkedList<String> list = new LinkedList<String>();
        dictFile.seek(p1);
        while (dictFile.getFilePointer() < p2) {
            String l;
            if (encoding == null) {
                l = dictFile.readLine();
            } else {
                l = dictReadLine();
            }
            int istar = l.indexOf('*');
            if (istar == -1) {
                throw new IOException("bad format: no * !");
            }
            String testcode = l.substring(0, istar);
            if (testcode.equals(code)) {
                list.add(l.substring(istar + 1));
            }
        }
        return list;
    }

    /**
     * Read one line of dictFile using the configured encoding.
     * Lines are limited to 254 bytes; the '\n' or '\r' terminator is consumed
     * but not returned.
     */
    private String dictReadLine() throws IOException {
        final int max = 255;
        byte[] buf = new byte[max];
        int i = 0;
        try {
            while (i < max - 1) {
                byte b = dictFile.readByte();
                buf[i++] = b;
                if (b == '\n' || b == '\r') {
                    break;
                }
            }
        } catch (EOFException ex) {
            // End of file: fall through with whatever was read so far.
        }
        if (i == 0) {
            return "";
        }
        // Strip the terminator only if one was actually read.  The previous
        // version dropped the last byte unconditionally, truncating the final
        // word of a file with no trailing newline (and over-long lines).
        int len = (buf[i - 1] == '\n' || buf[i - 1] == '\r') ? i - 1 : i;
        return new String(buf, 0, len, encoding);
    }

    /**
     * Returns a list of strings (words) for the code.  I/O errors are logged
     * to stderr and yield an empty list (original behavior).
     */
    public List getWords(String code) {
        try {
            return dichoFind(code, 0, dictFile.length() - 1);
        } catch (IOException ex) {
            System.err.println("IOException: " + ex.getMessage());
            return new LinkedList<String>();
        }
    }
}
package com.tempestasludi.java.p14_cssp.pcss.general;

import java.util.ArrayList;

import com.tempestasludi.java.p14_cssp.pcss.properties.Variable;
import com.tempestasludi.java.p14_cssp.pcss.selectors.Selector;

/**
 * Block represents a CSS block: a list of selectors followed by the units
 * enclosed between the braces.
 *
 * @author Tempestas Ludi
 */
public class Block implements Unit {

	/** The selectors heading the block (stored as a defensive copy). */
	private ArrayList<Selector> selectors;

	/** The units contained in the block (stored as a defensive copy). */
	private ArrayList<Unit> units;

	/**
	 * Class constructor.
	 *
	 * @param selectors
	 *            the selectors of the block
	 * @param units
	 *            the units of the block
	 */
	public Block(ArrayList<Selector> selectors, ArrayList<Unit> units) {
		super();
		this.selectors = new ArrayList<Selector>(selectors);
		this.units = new ArrayList<Unit>(units);
	}

	/**
	 * Get the selectors.
	 *
	 * @return a copy of the selectors
	 */
	public ArrayList<Selector> getSelectors() {
		return new ArrayList<Selector>(this.selectors);
	}

	/**
	 * Get the units.
	 *
	 * @return a copy of the units
	 */
	public ArrayList<Unit> getUnits() {
		return new ArrayList<Unit>(this.units);
	}

	/**
	 * Change the selectors.
	 *
	 * @param selectors
	 *            the selectors to change to
	 */
	public void setSelectors(ArrayList<Selector> selectors) {
		this.selectors = new ArrayList<Selector>(selectors);
	}

	/**
	 * Change the units.
	 *
	 * @param units
	 *            the units to change to
	 */
	public void setUnits(ArrayList<Unit> units) {
		this.units = new ArrayList<Unit>(units);
	}

	/**
	 * Reads a CSS block into a Block.
	 *
	 * @param block
	 *            the block data to read
	 * @return a document containing the file (not yet implemented, returns null)
	 */
	public static Block read(String block) {
		return null;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Unit preprocess(ArrayList<Variable> variables) {
		// TODO Auto-generated method stub
		return null;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Renders the block as "selectors { indented units }", indenting each
	 * unit one tab deeper and trimming the final unit.
	 */
	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		// Every selector is followed by a single space, including the last.
		for (Selector selector : this.selectors) {
			sb.append(selector).append(" ");
		}
		sb.append("{\n");
		int last = this.units.size() - 1;
		for (int i = 0; i <= last; i++) {
			String body = this.units.get(i).toString().replaceAll("\n", "\n\t");
			if (i == last) {
				body = body.trim();
			}
			sb.append("\t").append(body).append("\n");
		}
		sb.append("}");
		return sb.toString();
	}

}
package com.vectrace.MercurialEclipse.dialogs; import java.io.File; import java.util.ArrayList; import java.util.List; import org.eclipse.compare.ResourceNode; import org.eclipse.core.resources.IResource; import org.eclipse.core.runtime.Platform; import org.eclipse.core.runtime.content.IContentType; import org.eclipse.core.runtime.content.IContentTypeManager; import org.eclipse.jface.dialogs.TrayDialog; import org.eclipse.jface.text.Document; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.ITextListener; import org.eclipse.jface.text.TextEvent; import org.eclipse.jface.text.source.AnnotationModel; import org.eclipse.jface.text.source.ISourceViewer; import org.eclipse.jface.text.source.SourceViewer; import org.eclipse.jface.viewers.ArrayContentProvider; import org.eclipse.jface.viewers.CheckboxTableViewer; import org.eclipse.jface.viewers.ColumnPixelData; import org.eclipse.jface.viewers.DoubleClickEvent; import org.eclipse.jface.viewers.IDoubleClickListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.ITableLabelProvider; import org.eclipse.jface.viewers.StructuredViewer; import org.eclipse.jface.viewers.TableLayout; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.ui.editors.text.EditorsUI; import org.eclipse.ui.texteditor.AnnotationPreference; import 
org.eclipse.ui.texteditor.DefaultMarkerAnnotationAccess;
import org.eclipse.ui.texteditor.SourceViewerDecorationSupport;
import org.eclipse.ui.texteditor.spelling.SpellingAnnotation;
import org.eclipse.ui.texteditor.spelling.SpellingContext;
import org.eclipse.ui.texteditor.spelling.SpellingService;

import com.vectrace.MercurialEclipse.TableColumnSorter;
import com.vectrace.MercurialEclipse.compare.RevisionNode;
import com.vectrace.MercurialEclipse.model.HgRoot;
import com.vectrace.MercurialEclipse.team.IStorageMercurialRevision;
import com.vectrace.MercurialEclipse.ui.TextSpellingProblemCollector;
import com.vectrace.MercurialEclipse.utils.CompareUtils;

/**
 * A commit dialog box allowing choosing of what files to commit and a commit
 * message for those files. Untracked files may also be chosen.
 */
public class CommitDialog extends TrayDialog {

    // Status labels used for the file table's state column.
    public static final String FILE_MODIFIED = "Modified";
    public static final String FILE_ADDED = "Added";
    public static final String FILE_REMOVED = "Removed";
    public static final String FILE_UNTRACKED = "Untracked";
    public static final String FILE_DELETED = "Already Deleted";

    /**
     * Filter intended to hide un-commitable files (i.e. "!" -> deleted but
     * still tracked).
     */
    private class CommittableFilesFilter extends ViewerFilter {

        public CommittableFilesFilter() {
            super();
        }

        @Override
        public boolean select(Viewer viewer, Object parentElement, Object element) {
            // NOTE(review): both branches return true, so this filter
            // currently passes every element through — the instanceof check
            // has no effect. Confirm whether the non-CommitResource branch
            // (or a deleted-state check) should return false.
            if (element instanceof CommitResource) {
                return true;
            }
            return true;
        }
    }

    // Default used when the caller supplies no message.
    private String defaultCommitMessage = "(no commit message)";

    // Commit-message editor with spell-check support.
    private ISourceViewer commitTextBox;
    private Label commitTextLabel;
    private Label commitFilesLabel;
    private CheckboxTableViewer commitFilesList;
    // When false the file list is fixed (no checkboxes/untracked toggle).
    private boolean selectableFiles;
    private Button showUntrackedFilesButton;
    private Button selectAllButton;
    private UntrackedFilesFilter untrackedFilesFilter;
    private CommittableFilesFilter committableFilesFilter;
    private HgRoot root;

    // Outputs populated when the dialog is confirmed.
    private File[] filesToAdd;
    private List<IResource> resourcesToAdd;
    private File[] filesToCommit;
    private IResource[] resourcesToCommit;
    private String commitMessage;
    private IResource[] inResources;
    private File[] filesToRemove;
    private List<IResource> resourcesToRemove;

    private IDocument commitTextDocument;
    private SourceViewerDecorationSupport decorationSupport;

    /**
     * Create a resizable commit dialog for the given repository root and
     * candidate resources; files are selectable by default.
     *
     * @param shell the parent shell
     */
    public CommitDialog(Shell shell, HgRoot root, IResource[] inResources) {
        super(shell);
        setShellStyle(getShellStyle() | SWT.RESIZE | SWT.TITLE);
        this.root = root;
        this.inResources = inResources;
        this.untrackedFilesFilter = new UntrackedFilesFilter();
        this.committableFilesFilter = new CommittableFilesFilter();
        this.selectableFiles = true;
        this.commitTextDocument = new Document();
    }

    /**
     * Variant that presets the commit message and optionally fixes the file
     * selection.
     */
    public CommitDialog(Shell shell, HgRoot root, IResource[] inResources,
            String defaultCommitMessage, boolean selectableFiles) {
        this(shell, root, inResources);
        this.selectableFiles = selectableFiles;
        this.defaultCommitMessage = defaultCommitMessage;
    }

    /** @return the commit message entered by the user (set on OK). */
    public String getCommitMessage() {
        return commitMessage;
    }

    /** @return the files chosen for commit (set on OK). */
    public File[] getFilesToCommit() {
        return filesToCommit;
    }

    /** @return the resources chosen for commit (set on OK). */
    public IResource[] getResourcesToCommit() {
        return resourcesToCommit;
    }

    /** @return the untracked files that should be added first (set on OK). */
    public File[] getFilesToAdd() {
        return filesToAdd;
    }

    /** @return the untracked resources that should be added first (set on OK). */
    public List<IResource> getResourcesToAdd() {
        return resourcesToAdd;
    }

    @Override
    protected Control
createDialogArea(Composite parent) {
	Composite container = (Composite) super.createDialogArea(parent);
	container.setLayout(new FormLayout());

	// Commit-message area: label plus a spell-checked source viewer.
	commitTextLabel = new Label(container, SWT.NONE);
	commitTextLabel.setText("Commit comments");
	// commitTextBox = new Text(container, SWT.V_SCROLL | SWT.MULTI
	// | SWT.BORDER | SWT.WRAP);
	commitTextBox = new SourceViewer(container, null, SWT.V_SCROLL
			| SWT.MULTI | SWT.BORDER | SWT.WRAP);
	commitTextBox.setEditable(true);

	// set up spell-check annotations
	AnnotationModel annotationModel = new AnnotationModel();
	commitTextBox.setDocument(commitTextDocument, annotationModel);
	decorationSupport = new SourceViewerDecorationSupport(commitTextBox,
			null, new DefaultMarkerAnnotationAccess(), EditorsUI
					.getSharedTextColors());
	AnnotationPreference pref = new AnnotationPreference();
	pref.setAnnotationType(SpellingAnnotation.TYPE);
	pref.setColorPreferenceKey("spellingIndicationColor");
	pref.setHighlightPreferenceKey("spellingIndicationHighlighting");
	pref.setTextPreferenceKey("spellingIndication");
	decorationSupport.setAnnotationPreference(pref);
	decorationSupport.install(EditorsUI.getPreferenceStore());

	// Re-run the spelling check on every text change; the service, context
	// and problem collector are created lazily on the first change event.
	ITextListener textListener = new ITextListener() {
		private SpellingService spellService;
		private SpellingContext spellContext;
		private TextSpellingProblemCollector collector;

		public void textChanged(TextEvent event) {
			// connect to spell service if necessary
			if (spellService == null) {
				spellService = EditorsUI.getSpellingService();
			}
			if (spellContext == null) {
				spellContext = new SpellingContext();
				IContentType contentType = Platform.getContentTypeManager()
						.getContentType(IContentTypeManager.CT_TEXT);
				spellContext.setContentType(contentType);
			}
			if (collector == null) {
				collector = new TextSpellingProblemCollector(commitTextBox);
			}
			// check and highlight errors
			spellService.check(commitTextDocument, spellContext, collector,
					null);
		}
	};
	commitTextBox.addTextListener(textListener);

	// File-list area: label plus the checkbox table of committable files.
	commitFilesLabel = new Label(container, SWT.NONE);
	commitFilesLabel.setText("Select Files:");
	commitFilesList = createFilesList(container, selectableFiles);

	// FormLayout wiring: message label, message box, files label, table.
	final FormData fd_commitTextLabel = new FormData();
	fd_commitTextLabel.top = new FormAttachment(0, 20);
	fd_commitTextLabel.left = new FormAttachment(0, 9);
	fd_commitTextLabel.right = new FormAttachment(100, -9);
	commitTextLabel.setLayoutData(fd_commitTextLabel);
	final FormData fd_commitTextBox = new FormData();
	fd_commitTextBox.top = new FormAttachment(commitTextLabel, 3, SWT.BOTTOM);
	fd_commitTextBox.left = new FormAttachment(0, 9);
	fd_commitTextBox.bottom = new FormAttachment(0, 200);
	fd_commitTextBox.right = new FormAttachment(100, -9);
	commitTextBox.getTextWidget().setLayoutData(fd_commitTextBox);
	final FormData fd_commitFilesLabel = new FormData();
	fd_commitFilesLabel.top = new FormAttachment(commitTextBox
			.getTextWidget(), 3);
	fd_commitFilesLabel.left = new FormAttachment(0, 9);
	fd_commitFilesLabel.right = new FormAttachment(100, -9);
	commitFilesLabel.setLayoutData(fd_commitFilesLabel);
	Table table = commitFilesList.getTable();
	final FormData fd_table = new FormData();
	fd_table.top = new FormAttachment(commitFilesLabel, 3);
	fd_table.left = new FormAttachment(0, 9);
	fd_table.right = new FormAttachment(100, -9);
	fd_table.bottom = new FormAttachment(100, -9);
	table.setLayoutData(fd_table);

	if (selectableFiles) {
		// Extra controls only shown when the user may pick files.
		selectAllButton = new Button(container, SWT.CHECK);
		selectAllButton.setText("Select/unselect all");
		showUntrackedFilesButton = new Button(container, SWT.CHECK);
		showUntrackedFilesButton.setText("Show added/removed files");
		// Table now ends above the buttons rather than the dialog bottom.
		fd_table.bottom = new FormAttachment(selectAllButton, -9);
		final FormData fd_selectAllButton = new FormData();
		fd_selectAllButton.bottom = new FormAttachment(
				showUntrackedFilesButton);
		fd_selectAllButton.left = new FormAttachment(0, 9);
		fd_selectAllButton.right = new FormAttachment(100, -9);
		selectAllButton.setLayoutData(fd_selectAllButton);
		final FormData fd_showUntrackedFilesButton = new FormData();
		fd_showUntrackedFilesButton.bottom = new FormAttachment(100, -34);
		fd_showUntrackedFilesButton.right = new FormAttachment(100, -9);
		fd_showUntrackedFilesButton.left = new FormAttachment(0, 9);
		showUntrackedFilesButton.setLayoutData(fd_showUntrackedFilesButton);
	}
	makeActions();
	return container;
}

/**
 * Hooks up listeners: double-click opens a workspace-vs-Mercurial compare,
 * the two check buttons toggle filtering/selection, and each table column
 * gets a text-based sorter.
 */
private void makeActions() {
	commitFilesList.addDoubleClickListener(new IDoubleClickListener() {
		public void doubleClick(DoubleClickEvent event) {
			IStructuredSelection sel = (IStructuredSelection) commitFilesList
					.getSelection();
			if (sel.getFirstElement() instanceof CommitResource) {
				CommitResource resource = (CommitResource) sel
						.getFirstElement();
				// workspace version
				ResourceNode leftNode = new ResourceNode(resource
						.getResource());
				// mercurial version
				RevisionNode rightNode = new RevisionNode(
						new IStorageMercurialRevision(resource
								.getResource()));
				CompareUtils.openCompareDialog(leftNode, rightNode, false);
			}
		}
	});
	if (selectableFiles) {
		selectAllButton.setSelection(false); // Start not selected
		showUntrackedFilesButton.setSelection(true); // Start selected.
showUntrackedFilesButton
		.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				// Checked means: do NOT filter untracked files out.
				if (showUntrackedFilesButton.getSelection()) {
					commitFilesList
							.removeFilter(untrackedFilesFilter);
				} else {
					commitFilesList.addFilter(untrackedFilesFilter);
				}
				commitFilesList.refresh(true);
			}
		});
		selectAllButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				if (selectAllButton.getSelection()) {
					commitFilesList.setAllChecked(true);
				} else {
					commitFilesList.setAllChecked(false);
				}
			}
		});
	}
	setupDefaultCommitMessage();

	// Make every column sortable by its displayed text.
	final Table table = commitFilesList.getTable();
	TableColumn[] columns = table.getColumns();
	for (int ci = 0; ci < columns.length; ci++) {
		TableColumn column = columns[ci];
		final int colIdx = ci;
		new TableColumnSorter(commitFilesList, column) {
			@Override
			protected int doCompare(Viewer v, Object e1, Object e2) {
				StructuredViewer viewer = (StructuredViewer) v;
				ITableLabelProvider lp = ((ITableLabelProvider) viewer
						.getLabelProvider());
				String t1 = lp.getColumnText(e1, colIdx);
				String t2 = lp.getColumnText(e2, colIdx);
				return t1.compareTo(t2);
			}
		};
	}
}

/** Pre-fills the message box with the default message, fully selected. */
private void setupDefaultCommitMessage() {
	commitTextDocument.set(defaultCommitMessage);
	commitTextBox.setSelectedRange(0, defaultCommitMessage.length());
}

/**
 * Builds the checkbox table listing the committable resources and stores
 * it in {@link #commitFilesList}.
 *
 * @param container
 *            parent composite
 * @param selectable
 *            whether the user may check/uncheck individual rows
 * @return the configured viewer
 */
private CheckboxTableViewer createFilesList(Composite container,
		boolean selectable) {
	int flags = SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER;
	if (selectable) {
		flags |= SWT.CHECK | SWT.FULL_SELECTION | SWT.MULTI;
	} else {
		flags |= SWT.READ_ONLY | SWT.HIDE_SELECTION;
	}
	Table table = new Table(container, flags);
	table.setHeaderVisible(true);
	table.setLinesVisible(true);
	TableLayout layout = new TableLayout();
	TableColumn col;
	// Check mark
	col = new TableColumn(table, SWT.NONE | SWT.BORDER);
	col.setResizable(false);
	col.setText("");
	layout.addColumnData(new ColumnPixelData(20, false));
	// File name
	col = new TableColumn(table, SWT.NONE);
	col.setResizable(true);
	col.setText("File");
	layout.addColumnData(new ColumnPixelData(320, true));
	// File status
	col = new TableColumn(table, SWT.NONE);
	col.setResizable(true);
	col.setText("Status");
	layout.addColumnData(new ColumnPixelData(100, true));
	table.setLayout(layout);
	commitFilesList = new CheckboxTableViewer(table);
	commitFilesList.setContentProvider(new ArrayContentProvider());
	commitFilesList.setLabelProvider(new CommitResourceLabelProvider());
	CommitResource[] commitResources = new CommitResourceUtil(getRoot())
			.getCommitResources(inResources);
	commitFilesList.setInput(commitResources);
	commitFilesList.addFilter(committableFilesFilter);
	// auto-check all tracked elements
	// NOTE(review): status is compared with != here but with equals() in
	// the getTo*List helpers below; this only works if status values are
	// canonical constants — confirm.
	List<CommitResource> tracked = new ArrayList<CommitResource>();
	for (CommitResource commitResource : commitResources) {
		if (commitResource.getStatus() != CommitDialog.FILE_UNTRACKED) {
			tracked.add(commitResource);
		}
	}
	commitFilesList.setCheckedElements(tracked.toArray());
	return commitFilesList;
}

/**
 * Maps checked table entries to their files; returns null if any entry is
 * not a CommitResource.
 */
private File[] convertToFiles(Object[] objs) {
	ArrayList<File> list = new ArrayList<File>();
	for (int res = 0; res < objs.length; res++) {
		if (objs[res] instanceof CommitResource != true) {
			return null;
		}
		CommitResource resource = (CommitResource) objs[res];
		list.add(resource.getPath());
	}
	return list.toArray(new File[0]);
}

/**
 * Maps checked table entries to their workspace resources, skipping
 * entries without one; returns null if any entry is not a CommitResource.
 */
private IResource[] convertToResource(Object[] objs) {
	ArrayList<IResource> list = new ArrayList<IResource>();
	for (int res = 0; res < objs.length; res++) {
		if (objs[res] instanceof CommitResource != true) {
			return null;
		}
		CommitResource resource = (CommitResource) objs[res];
		IResource thisResource = resource.getResource();
		if (thisResource != null) {
			list.add(thisResource);
		}
	}
	return list.toArray(new IResource[0]);
}

/** Files among the checked entries whose status is "untracked". */
private File[] getToAddList(Object[] objs) {
	ArrayList<File> list = new ArrayList<File>();
	for (int res = 0; res < objs.length; res++) {
		if (objs[res] instanceof CommitResource != true) {
			return null;
		}
		CommitResource resource = (CommitResource) objs[res];
		if (resource.getStatus().equals(CommitDialog.FILE_UNTRACKED)) {
			list.add(resource.getPath());
		}
	}
	return list.toArray(new File[0]);
}

/** Files among the checked entries whose status is "deleted". */
private File[] getToRemoveList(Object[] objs) {
	ArrayList<File> list = new ArrayList<File>();
	for (int res = 0; res < objs.length; res++) {
		if (objs[res] instanceof CommitResource != true) {
			return null;
		}
		CommitResource resource = (CommitResource) objs[res];
		if (resource.getStatus().equals(CommitDialog.FILE_DELETED)) {
			list.add(resource.getPath());
		}
	}
	return list.toArray(new File[0]);
}

/** Resources among the checked entries whose status is "untracked". */
private List<IResource> getToAddResourceList(Object[] objs) {
	ArrayList<IResource> list = new ArrayList<IResource>();
	for (int res = 0; res < objs.length; res++) {
		if (objs[res] instanceof CommitResource != true) {
			return null;
		}
		CommitResource resource = (CommitResource) objs[res];
		if (resource.getStatus().equals(CommitDialog.FILE_UNTRACKED)) {
			list.add(resource.getResource());
		}
	}
	return list;
}

/** Resources among the checked entries whose status is "deleted". */
private List<IResource> getToRemoveResourceList(Object[] objs) {
	ArrayList<IResource> list = new ArrayList<IResource>();
	for (int res = 0; res < objs.length; res++) {
		if (objs[res] instanceof CommitResource != true) {
			return null;
		}
		CommitResource resource = (CommitResource) objs[res];
		if (resource.getStatus().equals(CommitDialog.FILE_DELETED)) {
			list.add(resource.getResource());
		}
	}
	return list;
}

/**
 * Override the OK button pressed to capture the info we want first and then
 * call super.
 */
@Override
protected void okPressed() {
	filesToAdd = getToAddList(commitFilesList.getCheckedElements());
	resourcesToAdd = getToAddResourceList(commitFilesList
			.getCheckedElements());
	filesToCommit = convertToFiles(commitFilesList.getCheckedElements());
	resourcesToCommit = convertToResource(commitFilesList
			.getCheckedElements());
	filesToRemove = getToRemoveList(commitFilesList.getCheckedElements());
	resourcesToRemove = getToRemoveResourceList(commitFilesList
			.getCheckedElements());
	commitMessage = commitTextDocument.get();
	super.okPressed();
}

@Override
protected Point getInitialSize() {
	// Fixed initial dialog size.
	return new Point(477, 562);
}

protected void setRoot(HgRoot root) {
	this.root = root;
}

protected HgRoot getRoot() {
	return root;
}

/** @return checked files with deleted status; set in okPressed(). */
public File[] getFilesToRemove() {
	return filesToRemove;
}

/** @return checked resources with deleted status; set in okPressed(). */
public List<IResource> getResourcesToRemove() {
	return resourcesToRemove;
}
}
package net.echinopsii.ariane.community.messaging.nats;

import akka.actor.ActorRef;
import io.nats.client.Connection;
import io.nats.client.Message;
import io.nats.client.SyncSubscription;
import net.echinopsii.ariane.community.messaging.api.*;
import net.echinopsii.ariane.community.messaging.common.MomAkkaAbsServiceFactory;
import net.echinopsii.ariane.community.messaging.common.MomAkkaService;
import net.echinopsii.ariane.community.messaging.common.MomLoggerFactory;
import org.slf4j.Logger;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

/**
 * NATS-backed service factory: builds request, message-group, feeder and
 * subscriber services, each pairing an Akka actor (message processing) with
 * a NATS synchronous subscription consumer (message polling).
 */
public class ServiceFactory extends MomAkkaAbsServiceFactory implements MomServiceFactory<MomAkkaService, AppMsgWorker, AppMsgFeeder, String> {

    private static final Logger log = MomLoggerFactory.getLogger(ServiceFactory.class);
    // Shared codec for NATS messages; final since it is never reassigned.
    private static final MsgTranslator translator = new MsgTranslator();

    public ServiceFactory(Client client) {
        super(client);
    }

    /** Creates the Akka actor that processes incoming request messages for {@code source}. */
    private static ActorRef createRequestActor(String source, MomClient client, AppMsgWorker requestCB) {
        return ((Client) client).getActorSystem().actorOf(
                MsgRequestActor.props(((Client) client), requestCB), source + "_msgWorker"
        );
    }

    /**
     * Creates a consumer that polls a synchronous NATS subscription on
     * {@code source} (10 ms timeout per poll) and forwards each received
     * message to {@code runnableReqActor}. The consumer runs until stop()
     * is called, then unsubscribes on the way out.
     */
    private static MomConsumer createConsumer(final String source, final ActorRef runnableReqActor, final Connection connection) {
        return new MomConsumer() {
            private boolean isRunning = false;

            @Override
            public void run() {
                SyncSubscription subs = null;
                try {
                    subs = connection.subscribeSync(source);
                    isRunning = true;
                    while (isRunning) {
                        Map<String, Object> finalMessage = null;
                        try {
                            Message msg = subs.nextMessage(10);
                            // FIX: only decode and trace when a message was actually
                            // received. The original decoded first and would NPE when
                            // nextMessage returned null (its own later `msg != null`
                            // check shows null is a possible outcome).
                            if (msg != null) {
                                finalMessage = translator.decode(msg);
                                // Messages flagged for tracing temporarily raise the log level.
                                if (finalMessage.containsKey(MomMsgTranslator.MSG_TRACE))
                                    ((MomLogger) log).setTraceLevel(true);
                                ((MomLogger) log).traceMessage("MomConsumer(" + source + ").run", finalMessage);
                                if (finalMessage.containsKey(MomMsgTranslator.MSG_TRACE))
                                    ((MomLogger) log).setTraceLevel(false);
                                if (isRunning)
                                    runnableReqActor.tell(msg, null);
                            }
                        } catch (TimeoutException e) {
                            // Reset any trace level raised before the failure.
                            if (finalMessage != null && finalMessage.containsKey(MomMsgTranslator.MSG_TRACE))
                                ((MomLogger) log).setTraceLevel(false);
                            log.debug("no message found during last 10 ms");
                        } catch (IllegalStateException | IOException e) {
                            if (finalMessage != null && finalMessage.containsKey(MomMsgTranslator.MSG_TRACE))
                                ((MomLogger) log).setTraceLevel(false);
                            // Errors during shutdown (isRunning == false) are expected; ignore.
                            if (isRunning)
                                log.error("[source: " + source + "]" + e.getMessage());
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Best-effort teardown of the subscription.
                    if (!connection.isClosed() && subs != null) {
                        try {
                            subs.unsubscribe();
                            subs.close();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
            }

            @Override
            public boolean isRunning() {
                return isRunning;
            }

            @Override
            public void start() {
                new Thread(this, source + "_consumer").start();
            }

            @Override
            public void stop() {
                isRunning = false;
            }
        };
    }

    /**
     * Creates a manager that opens/closes one extra consumer per message
     * group; each group consumer listens on "groupID-source" and feeds the
     * same request actor.
     */
    private static MomMsgGroupSubServiceMgr createSessionManager(final String source, final ActorRef runnableReqActor, final Connection connection) {
        return new MomMsgGroupSubServiceMgr() {
            HashMap<String, MomConsumer> sessionConsumersRegistry = new HashMap<>();

            @Override
            public void openMsgGroupSubService(String groupID) {
                final String sessionSource = groupID + "-" + source;
                sessionConsumersRegistry.put(groupID, ServiceFactory.createConsumer(sessionSource, runnableReqActor, connection));
                sessionConsumersRegistry.get(groupID).start();
            }

            @Override
            public void closeMsgGroupSubService(String groupID) {
                if (sessionConsumersRegistry.containsKey(groupID)) {
                    sessionConsumersRegistry.get(groupID).stop();
                    sessionConsumersRegistry.remove(groupID);
                }
            }

            @Override
            public void stop() {
                // Iterate over a clone so removal does not invalidate the iteration.
                HashMap<String, MomConsumer> sessionConsumersRegistryClone =
                        (HashMap<String, MomConsumer>) sessionConsumersRegistry.clone();
                for (String sessionID : sessionConsumersRegistryClone.keySet()) {
                    sessionConsumersRegistry.get(sessionID).stop();
                    sessionConsumersRegistry.remove(sessionID);
                }
            }
        };
    }

    /**
     * Creates a request service on {@code source} that additionally supports
     * per-message-group sub-services.
     *
     * @return the started service, or null if the NATS connection is unusable
     */
    @Override
    public MomAkkaService msgGroupRequestService(String source, AppMsgWorker requestCB) {
        final Connection connection = ((Client) super.getMomClient()).getConnection();
        MomAkkaService ret = null;
        ActorRef requestActor;
        MomConsumer consumer;
        MomMsgGroupSubServiceMgr sessionMgr = null;
        if (connection != null && !connection.isClosed()) {
            requestActor = ServiceFactory.createRequestActor(source, super.getMomClient(), requestCB);
            consumer = ServiceFactory.createConsumer(source, requestActor, connection);
            consumer.start();
            sessionMgr = ServiceFactory.createSessionManager(source, requestActor, connection);
            ret = new MomAkkaService().setMsgWorker(requestActor).setConsumer(consumer).setClient((Client) super.getMomClient()).
                    setMsgGroupSubServiceMgr(sessionMgr);
            super.getServices().add(ret);
        }
        return ret;
    }

    /**
     * Creates a plain request service: a consumer on {@code source} feeding
     * a request actor built around {@code requestCB}.
     *
     * @return the started service, or null if the NATS connection is unusable
     */
    @Override
    public MomAkkaService requestService(String source, AppMsgWorker requestCB) {
        final Connection connection = ((Client) super.getMomClient()).getConnection();
        MomAkkaService ret = null;
        ActorRef requestActor = null;
        MomConsumer consumer = null;
        if (connection != null && !connection.isClosed()) {
            requestActor = ServiceFactory.createRequestActor(source, super.getMomClient(), requestCB);
            consumer = ServiceFactory.createConsumer(source, requestActor, connection);
            consumer.start();
            ret = new MomAkkaService().setMsgWorker(requestActor).setConsumer(consumer).setClient(
                    ((Client) super.getMomClient())
            );
            super.getServices().add(ret);
        }
        return ret;
    }

    /**
     * Creates a feeder service that publishes messages produced by
     * {@code feederCB} to {@code baseDestination} every {@code interval} ms.
     *
     * @return the service, or null if the NATS connection is unusable
     */
    @Override
    public MomAkkaService feederService(String baseDestination, String selector, int interval, AppMsgFeeder feederCB) {
        MomAkkaService ret = null;
        Connection connection = ((Client) super.getMomClient()).getConnection();
        if (connection != null && !connection.isClosed()) {
            ActorRef feeder = ((Client) super.getMomClient()).getActorSystem().actorOf(MsgFeederActor.props(
                    ((Client) super.getMomClient()), baseDestination, selector, feederCB)
            );
            ret = new MomAkkaService().setClient(((Client) super.getMomClient())).setMsgFeeder(feeder, interval);
            super.getServices().add(ret);
        }
        return ret;
    }

    /**
     * Creates a subscriber service on "source[.selector]" feeding
     * {@code feedCB} through a subscription actor.
     *
     * @return the started service, or null if the NATS connection is unusable
     */
    @Override
    public MomAkkaService subscriberService(String source, String selector, AppMsgWorker feedCB) {
        MomAkkaService ret = null;
        ActorRef subsActor;
        MomConsumer consumer;
        final Connection connection = ((Client) super.getMomClient()).getConnection();
        if (connection != null && !connection.isClosed()) {
            final String subject = source + ((selector != null && !selector.equals("")) ? "." + selector : "");
            subsActor = ((Client) super.getMomClient()).getActorSystem().actorOf(
                    MsgSubsActor.props(feedCB), subject + "_msgWorker"
            );
            consumer = ServiceFactory.createConsumer(subject, subsActor, connection);
            consumer.start();
            ret = new MomAkkaService().setMsgWorker(subsActor).setConsumer(consumer).setClient(
                    ((Client) super.getMomClient())
            );
            super.getServices().add(ret);
        }
        return ret;
    }
}
package org.navalplanner.web.planner.order; import static org.navalplanner.web.I18nHelper._; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.navalplanner.business.common.IAdHocTransactionService; import org.navalplanner.business.common.IOnTransaction; import org.navalplanner.business.common.exceptions.InstanceNotFoundException; import org.navalplanner.business.orders.entities.Order; import org.navalplanner.business.planner.daos.IDayAssignmentDAO; import org.navalplanner.business.planner.daos.ITaskElementDAO; import org.navalplanner.business.planner.entities.DayAssignment; import org.navalplanner.business.planner.entities.DerivedAllocation; import org.navalplanner.business.planner.entities.DerivedDayAssignment; import org.navalplanner.business.planner.entities.ResourceAllocation; import org.navalplanner.business.planner.entities.TaskElement; import org.navalplanner.business.planner.entities.TaskGroup; import org.navalplanner.business.scenarios.IScenarioManager; import org.navalplanner.business.scenarios.daos.IScenarioDAO; import org.navalplanner.business.scenarios.entities.OrderVersion; import org.navalplanner.business.scenarios.entities.Scenario; import org.navalplanner.web.common.concurrentdetection.OnConcurrentModification; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.zkoss.ganttz.extensions.IContext; import org.zkoss.zul.Messagebox; @Component @Scope(BeanDefinition.SCOPE_PROTOTYPE) @OnConcurrentModification(goToPage = "/planner/index.zul;company_scheduling") public class SaveCommand implements ISaveCommand { private static final Log LOG = 
LogFactory.getLog(SaveCommand.class); @Autowired private ITaskElementDAO taskElementDAO; @Autowired private IDayAssignmentDAO dayAssignmentDAO; private PlanningState state; private Order order; @Autowired private IAdHocTransactionService transactionService; private List<IAfterSaveListener> listeners = new ArrayList<IAfterSaveListener>(); @Autowired private IScenarioManager scenarioManager; @Autowired private IScenarioDAO scenarioDAO; @Override public void setState(PlanningState state) { this.state = state; } @Override public void setOrder(Order order) { this.order = order; } @Override public void doAction(IContext<TaskElement> context) { final Scenario currentScenario = transactionService .runOnReadOnlyTransaction(new IOnTransaction<Scenario>() { @Override public Scenario execute() { return scenarioManager.getCurrent(); } }); final boolean scenarioIsOwner = scenarioIsOwner(currentScenario); if (scenarioIsOwner || userAcceptsCreateANewOrderVersion()) { transactionService.runOnTransaction(new IOnTransaction<Void>() { @Override public Void execute() { doTheSaving(currentScenario, scenarioIsOwner); return null; } }); fireAfterSave(); notifyUserThatSavingIsDone(); } } private void fireAfterSave() { for (IAfterSaveListener listener : listeners) { listener.onAfterSave(); } } private void notifyUserThatSavingIsDone() { try { Messagebox.show(_("Scheduling saved"), _("Information"), Messagebox.OK, Messagebox.INFORMATION); } catch (InterruptedException e) { throw new RuntimeException(e); } } private void doTheSaving(Scenario currentScenario, boolean scenarioIsOwner) { saveTasksToSave(); removeTasksToRemove(); taskElementDAO.removeOrphanedDayAssignments(); if (!scenarioIsOwner) { createAndSaveNewOrderVersion(currentScenario); } } private void removeTasksToRemove() { for (TaskElement taskElement : state.getToRemove()) { if (taskElementDAO.exists(taskElement.getId())) { // it might have already been saved in a previous save action try { 
taskElementDAO.remove(taskElement.getId()); } catch (InstanceNotFoundException e) { throw new RuntimeException(e); } } } } private void saveTasksToSave() { for (TaskElement taskElement : state.getTasksToSave()) { removeDetachedDerivedDayAssignments(taskElement); taskElementDAO.save(taskElement); dontPoseAsTransient(taskElement); } if (!state.getTasksToSave().isEmpty()) { updateRootTaskPosition(); } } private void removeDetachedDerivedDayAssignments(TaskElement taskElement) { for (ResourceAllocation<?> each : taskElement.getSatisfiedResourceAllocations()) { for (DerivedAllocation eachDerived : each.getDerivedAllocations()) { removeAssigments(eachDerived.getDetached()); eachDerived.clearDetached(); } } } private void removeAssigments(Set<DerivedDayAssignment> detached) { List<DerivedDayAssignment> toRemove = new ArrayList<DerivedDayAssignment>(); for (DerivedDayAssignment eachAssignment : detached) { if (!eachAssignment.isNewObject()) { toRemove.add(eachAssignment); } } dayAssignmentDAO.removeDerived(toRemove); } // newly added TaskElement such as milestones must be called // dontPoseAsTransientObjectAnymore private void dontPoseAsTransient(TaskElement taskElement) { if (taskElement.isNewObject()) { taskElement.dontPoseAsTransientObjectAnymore(); } Set<ResourceAllocation<?>> resourceAllocations = taskElement.getSatisfiedResourceAllocations(); dontPoseAsTransient(resourceAllocations); if (!taskElement.isLeaf()) { for (TaskElement each : taskElement.getChildren()) { dontPoseAsTransient(each); } } } private void dontPoseAsTransient( Set<ResourceAllocation<?>> resourceAllocations) { for (ResourceAllocation<?> each : resourceAllocations) { each.dontPoseAsTransientObjectAnymore(); for (DayAssignment eachAssignment : each.getAssignments()) { eachAssignment.dontPoseAsTransientObjectAnymore(); } for (DerivedAllocation eachDerived : each.getDerivedAllocations()) { eachDerived.dontPoseAsTransientObjectAnymore(); for (DerivedDayAssignment eachAssignment : eachDerived 
.getAssignments()) { eachAssignment.dontPoseAsTransientObjectAnymore(); } } } } private void updateRootTaskPosition() { TaskGroup rootTask = state.getRootTask(); final Date min = minDate(state.getTasksToSave()); if (min != null) { rootTask.setStartDate(min); } final Date max = maxDate(state.getTasksToSave()); if (max != null) { rootTask.setEndDate(max); } taskElementDAO.save(rootTask); } private Date maxDate(Collection<? extends TaskElement> tasksToSave) { List<Date> endDates = toEndDates(tasksToSave); return endDates.isEmpty() ? null : Collections.max(endDates); } private List<Date> toEndDates(Collection<? extends TaskElement> tasksToSave) { List<Date> result = new ArrayList<Date>(); for (TaskElement taskElement : tasksToSave) { Date endDate = taskElement.getEndDate(); if (endDate != null) { result.add(endDate); } else { LOG.warn("the task" + taskElement + " has null end date"); } } return result; } private Date minDate(Collection<? extends TaskElement> tasksToSave) { List<Date> startDates = toStartDates(tasksToSave); return startDates.isEmpty() ? null : Collections.min(startDates); } private List<Date> toStartDates( Collection<? 
extends TaskElement> tasksToSave) { List<Date> result = new ArrayList<Date>(); for (TaskElement taskElement : tasksToSave) { Date startDate = taskElement.getStartDate(); if (startDate != null) { result.add(startDate); } else { LOG.warn("the task" + taskElement + " has null start date"); } } return result; } @Override public String getName() { return _("Save"); } @Override public void addListener(IAfterSaveListener listener) { listeners.add(listener); } @Override public void removeListener(IAfterSaveListener listener) { listeners.remove(listener); } @Override public String getImage() { return "/common/img/ico_save.png"; } private boolean scenarioIsOwner(Scenario currentScenario) { OrderVersion orderVersion = currentScenario.getOrderVersion(order); if (orderVersion == null) { throw new RuntimeException( "Order version must never be null for an order in any scenario"); } return currentScenario.getId().equals( orderVersion.getOwnerScenario().getId()); } private boolean userAcceptsCreateANewOrderVersion() { try { int status = Messagebox .show( _("Confirm creating a new order version for this scenario and derived. Are you sure?"), _("New order version"), Messagebox.OK | Messagebox.CANCEL, Messagebox.QUESTION); return (Messagebox.OK == status); } catch (InterruptedException e) { throw new RuntimeException(e); } } private void createAndSaveNewOrderVersion(Scenario currentScenario) { OrderVersion previousOrderVersion = currentScenario .getOrderVersion(order); OrderVersion newOrderVersion = OrderVersion .createInitialVersion(currentScenario); currentScenario.setOrderVersion(order, newOrderVersion); scenarioDAO.save(currentScenario); for (Scenario scenario : scenarioDAO .getDerivedScenarios(currentScenario)) { if ((scenario.getOrderVersion(order) != null) && (scenario.getOrderVersion(order).getId() .equals(previousOrderVersion.getId()))) { scenario.setOrderVersion(order, newOrderVersion); scenarioDAO.save(scenario); } } } }
package org.nakedobjects.object.persistence.defaults; import org.nakedobjects.NakedObjects; import org.nakedobjects.object.InternalCollection; import org.nakedobjects.object.Naked; import org.nakedobjects.object.NakedObject; import org.nakedobjects.object.NakedObjectRuntimeException; import org.nakedobjects.object.Persistable; import org.nakedobjects.object.persistence.Oid; import org.nakedobjects.object.persistence.OidGenerator; import org.nakedobjects.object.reflect.NakedObjectField; import org.nakedobjects.object.reflect.OneToManyAssociation; import org.nakedobjects.utility.ToString; import org.apache.log4j.Logger; public class DefaultPersistAlgorithm implements PersistAlgorithm { private static final Logger LOG = Logger.getLogger(DefaultPersistAlgorithm.class); private OidGenerator oidGenerator; private final synchronized Oid createOid(Naked object) { Oid oid = oidGenerator.next(object); LOG.debug("createOid " + oid); return oid; } public void init() {} public void makePersistent(NakedObject object, PersistedObjectAdder manager) { if (object.getResolveState().isPersistent() || object.getSpecification().persistable() == Persistable.TRANSIENT) { return; } LOG.info("persist " + object); NakedObjects.getObjectLoader().madePersistent(object, createOid(object)); NakedObjectField[] fields = object.getFields(); for (int i = 0; i < fields.length; i++) { NakedObjectField field = fields[i]; if (field.isDerived()) { continue; } else if (field.isValue()) { continue; } else if (field instanceof OneToManyAssociation) { InternalCollection collection = (InternalCollection) object.getField(field); collection.setOid(createOid(collection)); collection.setResolved(); for (int j = 0; j < collection.size(); j++) { makePersistent(collection.elementAt(j), manager); } } else { Object fieldValue = object.getField(field); if (fieldValue == null) { continue; } if (!(fieldValue instanceof NakedObject)) { throw new NakedObjectRuntimeException(fieldValue + " is not a NakedObject"); } 
makePersistent((NakedObject) fieldValue, manager); } } manager.createObject(object); } public String name() { return "Simple Bottom Up Persistence Walker"; } /** * Expose as a .NET property * * @property */ public void set_OidGenerator(OidGenerator oidGenerator) { this.oidGenerator = oidGenerator; } public void setOidGenerator(OidGenerator oidGenerator) { this.oidGenerator = oidGenerator; } public void shutdown() { oidGenerator.shutdown(); oidGenerator = null; } public String toString() { ToString toString = new ToString(this); if (oidGenerator != null) { toString.append("oidGenerator", oidGenerator.name()); } return toString.toString(); } }
package tuwien.inso.mnsa.nokiaprovider.intern;

import java.io.IOException;

import javax.smartcardio.ATR;
import javax.smartcardio.CommandAPDU;
import javax.smartcardio.ResponseAPDU;

/**
 * A connection to a card reader/terminal: manages the physical link and a
 * logical card channel, and exchanges APDUs with a present card.
 */
@SuppressWarnings("restriction")
public interface Connection {

	/**
	 * Establishes this connection. Does nothing if the connection is already
	 * established.
	 *
	 * @throws IOException
	 *             if the connection cannot be established
	 */
	void connect() throws IOException;

	/**
	 * Closes this connection. Does nothing if the connection is already
	 * closed.
	 */
	void disconnect();

	/**
	 * Returns true if this connection is open.
	 *
	 * @return whether the connection is currently established
	 */
	boolean isConnected();

	/**
	 * Returns true if there has been a card present and no communication error
	 * happened since then and the connection has not been closed.
	 *
	 * @throws IOException
	 *             if the card presence cannot be determined
	 */
	boolean isCardPresent() throws IOException;

	/**
	 * Sends an APDU to the card and returns the response.
	 *
	 * @param capdu
	 *            the command APDU to send
	 * @return the card's response APDU
	 * @throws IOException
	 *             if no card is present, no logical connection to it is
	 *             established or some communication error occurred.
	 */
	ResponseAPDU transceive(CommandAPDU capdu) throws IOException;

	/**
	 * Gets the ATR (answer-to-reset) of the present card.
	 *
	 * @return the card's ATR
	 * @throws IOException
	 *             if the ATR cannot be obtained
	 */
	ATR getATR() throws IOException;

	/**
	 * Returns the unique name of this connection.
	 *
	 * @return the connection name
	 */
	String getName();

	/**
	 * Closes the previously established connection (channel) to the card. If no
	 * connection is established, this method does nothing.
	 *
	 * @throws IOException
	 *             If a problem occurs during the closing of the channel
	 */
	void closeCardConnection() throws IOException;

	/**
	 * Opens the logical connection (channel) to the card (to enable sending
	 * APDUs down the channel). If the connection is already established, this
	 * method does nothing.
	 *
	 * @throws IOException
	 *             If a problem occurs during the opening of the channel
	 */
	void openCardConnection() throws IOException;
}
package org.eclipse.egit.ui.internal.merge; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.eclipse.compare.CompareConfiguration; import org.eclipse.compare.CompareEditorInput; import org.eclipse.compare.structuremergeviewer.DiffNode; import org.eclipse.compare.structuremergeviewer.Differencer; import org.eclipse.compare.structuremergeviewer.IDiffContainer; import org.eclipse.compare.structuremergeviewer.IDiffElement; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.Path; import org.eclipse.egit.core.AdaptableFileTreeIterator; import org.eclipse.egit.core.internal.CompareCoreUtils; import org.eclipse.egit.core.internal.storage.GitFileRevision; import org.eclipse.egit.core.project.RepositoryMapping; import org.eclipse.egit.ui.Activator; import org.eclipse.egit.ui.UIText; import org.eclipse.egit.ui.internal.FileRevisionTypedElement; import org.eclipse.egit.ui.internal.dialogs.CompareTreeView; import org.eclipse.jgit.dircache.DirCacheIterator; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.AbstractTreeIterator; import org.eclipse.jgit.treewalk.CanonicalTreeParser; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.treewalk.WorkingTreeIterator; import org.eclipse.jgit.treewalk.filter.OrTreeFilter; import org.eclipse.jgit.treewalk.filter.PathFilter; import org.eclipse.jgit.treewalk.filter.TreeFilter; import org.eclipse.osgi.util.NLS; import org.eclipse.swt.graphics.Image; import org.eclipse.ui.ISharedImages; import org.eclipse.ui.PlatformUI; /** * A Git-specific {@link CompareEditorInput} 
 * that compares two versions (commit IDs, or a commit against the index) of a
 * repository, optionally restricted to a set of selected resources. The
 * compare version is shown on the left, the base version on the right.
 */
public class GitCompareEditorInput extends CompareEditorInput {
	// shared workbench image; NOT disposed by this class (see handleDispose)
	private static final Image FOLDER_IMAGE = PlatformUI.getWorkbench()
			.getSharedImages().getImage(ISharedImages.IMG_OBJ_FOLDER);

	// version shown on the right side of the compare editor
	private final String baseVersion;

	// version shown on the left side; may be null or CompareTreeView.INDEX_VERSION
	private final String compareVersion;

	// topmost selected resources (nested selections are filtered out)
	private final IResource[] resources;

	// repo-relative paths used to restrict the TreeWalk to the selection
	private final List<String> filterPathStrings = new ArrayList<String>();

	// maps each selected resource's repo-relative path to a synthetic root node
	private final Map<IPath, IDiffContainer> diffRoots = new HashMap<IPath, IDiffContainer>();

	// resolved lazily in prepareInput() for the resource-based constructor
	private Repository repository;

	/**
	 * @param compareVersion
	 *            (shown on the left side in compare); currently only commit IDs
	 *            are supported
	 * @param baseVersion
	 *            (shown on the right side in compare); currently only commit
	 *            IDs are supported
	 * @param resources
	 *            as selected by the user
	 */
	public GitCompareEditorInput(String compareVersion, String baseVersion,
			IResource... resources) {
		super(new CompareConfiguration());
		this.resources = convertResourceInput(resources);
		this.baseVersion = baseVersion;
		this.compareVersion = compareVersion;
	}

	/**
	 * @param compareVersion
	 *            (shown on the left side in compare); currently only commit IDs
	 *            are supported
	 * @param baseVersion
	 *            (shown on the right side in compare); currently only commit
	 *            IDs are supported
	 * @param repository
	 *            as selected by the user
	 */
	public GitCompareEditorInput(String compareVersion, String baseVersion,
			Repository repository) {
		super(new CompareConfiguration());
		this.resources = new IResource[0];
		this.baseVersion = baseVersion;
		this.compareVersion = compareVersion;
		this.repository = repository;
	}

	/**
	 * Resolves the repository from the selected resources (verifying they all
	 * belong to the same one), parses both version commits, sets editor labels
	 * and title, and builds the diff tree.
	 */
	@Override
	protected Object prepareInput(IProgressMonitor monitor)
			throws InvocationTargetException, InterruptedException {
		// make sure all resources belong to the same repository
		RevWalk rw = null;
		try {
			monitor.beginTask(
					UIText.GitCompareEditorInput_CompareResourcesTaskName,
					IProgressMonitor.UNKNOWN);
			for (IResource resource : resources) {
				RepositoryMapping map = RepositoryMapping.getMapping(resource
						.getProject());
				if (repository != null && repository != map.getRepository())
					throw new InvocationTargetException(
							new IllegalStateException(
									UIText.GitCompareEditorInput_ResourcesInDifferentReposMessagge));
				String repoRelativePath = map.getRepoRelativePath(resource);
				filterPathStrings.add(repoRelativePath);
				// synthetic folder node acting as the root for this resource
				DiffNode node = new DiffNode(Differencer.NO_CHANGE) {
					@Override
					public Image getImage() {
						return FOLDER_IMAGE;
					}
				};
				diffRoots
						.put(new Path(map.getRepoRelativePath(resource)), node);
				repository = map.getRepository();
			}
			// also hit when resources is empty and no repository was injected
			if (repository == null)
				throw new InvocationTargetException(
						new IllegalStateException(
								UIText.GitCompareEditorInput_ResourcesInDifferentReposMessagge));
			if (monitor.isCanceled())
				throw new InterruptedException();
			rw = new RevWalk(repository);
			final RevCommit baseCommit;
			try {
				baseCommit = rw.parseCommit(repository.resolve(baseVersion));
			} catch (IOException e) {
				throw new InvocationTargetException(e);
			}
			final RevCommit compareCommit;
			if (compareVersion == null)
				compareCommit = null;
			else
				try {
					compareCommit = rw.parseCommit(repository
							.resolve(compareVersion));
				} catch (IOException e) {
					throw new InvocationTargetException(e);
				}
			if (monitor.isCanceled())
				throw new InterruptedException();
			// set the labels
			CompareConfiguration config = getCompareConfiguration();
			config.setLeftLabel(compareVersion);
			config.setRightLabel(baseVersion);
			// set title and icon
			if (resources.length == 0) {
				Object[] titleParameters = new Object[] {
						Activator.getDefault().getRepositoryUtil()
								.getRepositoryName(repository),
						compareVersion, baseVersion };
				setTitle(NLS.bind(UIText.GitCompareEditorInput_EditorTitle,
						titleParameters));
			} else if (resources.length == 1) {
				Object[] titleParameters = new Object[] {
						resources[0].getFullPath().makeRelative().toString(),
						compareVersion, baseVersion };
				setTitle(NLS.bind(
						UIText.GitCompareEditorInput_EditorTitleSingleResource,
						titleParameters));
			} else
				setTitle(NLS
						.bind(
								UIText.GitCompareEditorInput_EditorTitleMultipleResources,
								compareVersion, baseVersion));
			// build the nodes
			try {
				return buildDiffContainer(baseCommit, compareCommit, monitor);
			} catch (IOException e) {
				throw new InvocationTargetException(e);
			}
		} finally {
			if (rw != null)
				rw.dispose();
			monitor.done();
		}
	}

	@Override
	protected void contentsCreated() {
		super.contentsCreated();
		// select the first conflict
		getNavigator().selectChange(true);
	}

	@Override
	protected void handleDispose() {
		super.handleDispose();
		// we do NOT dispose the images, as these are shared
	}

	/**
	 * Walks the base and compare trees in one {@link TreeWalk} and produces a
	 * {@link DiffNode} tree with one node per changed path: both sides present
	 * (content differs), deleted on compare side, or added on compare side.
	 *
	 * NOTE(review): when compareCommit is null (compareVersion == null in
	 * prepareInput), compareVersion.equals(...) below would NPE — presumably
	 * callers never combine a null compareVersion with a non-null baseCommit;
	 * confirm against callers.
	 */
	private IDiffContainer buildDiffContainer(RevCommit baseCommit,
			RevCommit compareCommit, IProgressMonitor monitor)
			throws IOException, InterruptedException {
		boolean useIndex = compareVersion.equals(CompareTreeView.INDEX_VERSION);
		boolean checkIgnored = false;
		IDiffContainer result = new DiffNode(Differencer.CONFLICTING);
		TreeWalk tw = new TreeWalk(repository);
		// filter by selected resources
		if (filterPathStrings.size() > 1) {
			List<TreeFilter> suffixFilters = new ArrayList<TreeFilter>();
			for (String filterPath : filterPathStrings)
				suffixFilters.add(PathFilter.create(filterPath));
			TreeFilter otf = OrTreeFilter.create(suffixFilters);
			tw.setFilter(otf);
		} else if (filterPathStrings.size() > 0) {
			String path = filterPathStrings.get(0);
			// empty path means whole repository: no filter needed
			if (path.length() != 0)
				tw.setFilter(PathFilter.create(path));
		}
		tw.setRecursive(true);
		int baseTreeIndex;
		if (baseCommit == null) {
			// compare workspace with something
			checkIgnored = true;
			baseTreeIndex = tw.addTree(new AdaptableFileTreeIterator(
					repository, ResourcesPlugin.getWorkspace().getRoot()));
		} else
			baseTreeIndex = tw.addTree(new CanonicalTreeParser(null, repository
					.newObjectReader(), baseCommit.getTree()));
		int compareTreeIndex;
		if (!useIndex)
			compareTreeIndex = tw.addTree(new CanonicalTreeParser(null,
					repository.newObjectReader(), compareCommit.getTree()));
		else
			// compare something with the index
			compareTreeIndex = tw.addTree(new DirCacheIterator(repository
					.readDirCache()));
		try {
			while (tw.next()) {
				if (monitor.isCanceled())
					throw new InterruptedException();
				AbstractTreeIterator compareVersionIterator = tw.getTree(
						compareTreeIndex, AbstractTreeIterator.class);
				AbstractTreeIterator baseVersionIterator = tw.getTree(
						baseTreeIndex, AbstractTreeIterator.class);
				// skip ignored working-tree entries when comparing workspace
				if (checkIgnored
						&& baseVersionIterator != null
						&& ((WorkingTreeIterator) baseVersionIterator)
								.isEntryIgnored())
					continue;
				if (compareVersionIterator != null
						&& baseVersionIterator != null) {
					boolean equalContent = compareVersionIterator
							.getEntryObjectId().equals(
									baseVersionIterator.getEntryObjectId());
					if (equalContent)
						continue;
				}
				String encoding = null;
				GitFileRevision compareRev = null;
				if (compareVersionIterator != null) {
					String entryPath = compareVersionIterator
							.getEntryPathString();
					encoding = CompareCoreUtils.getResourceEncoding(repository,
							entryPath);
					if (!useIndex)
						compareRev = GitFileRevision.inCommit(repository,
								compareCommit, entryPath,
								tw.getObjectId(compareTreeIndex));
					else
						compareRev = GitFileRevision.inIndex(repository,
								entryPath);
				}
				GitFileRevision baseRev = null;
				if (baseVersionIterator != null) {
					String entryPath = baseVersionIterator
							.getEntryPathString();
					if (encoding == null) {
						encoding = CompareCoreUtils.getResourceEncoding(
								repository, entryPath);
					}
					baseRev = GitFileRevision.inCommit(repository, baseCommit,
							entryPath, tw.getObjectId(baseTreeIndex));
				}
				if (compareVersionIterator != null
						&& baseVersionIterator != null) {
					monitor.setTaskName(baseVersionIterator
							.getEntryPathString());
					// content exists on both sides
					add(result, baseVersionIterator.getEntryPathString(),
							new DiffNode(new FileRevisionTypedElement(
									compareRev, encoding),
									new FileRevisionTypedElement(baseRev,
											encoding)));
				} else if (baseVersionIterator != null
						&& compareVersionIterator == null) {
					monitor.setTaskName(baseVersionIterator
							.getEntryPathString());
					// only on base side
					add(result, baseVersionIterator.getEntryPathString(),
							new DiffNode(Differencer.DELETION
									| Differencer.RIGHT, null, null,
									new FileRevisionTypedElement(baseRev,
											encoding)));
				} else if (compareVersionIterator != null
						&& baseVersionIterator == null) {
					monitor.setTaskName(compareVersionIterator
							.getEntryPathString());
					// only on compare side
					add(result, compareVersionIterator.getEntryPathString(),
							new DiffNode(Differencer.ADDITION
									| Differencer.RIGHT, null,
									new FileRevisionTypedElement(compareRev,
											encoding), null));
				}
				if (monitor.isCanceled())
					throw new InterruptedException();
			}
			return result;
		} finally {
			tw.release();
		}
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result
				+ ((baseVersion == null) ? 0 : baseVersion.hashCode());
		result = prime * result
				+ ((compareVersion == null) ? 0 : compareVersion.hashCode());
		// repositories are compared by git directory, consistent with equals()
		result = prime * result
				+ ((repository == null) ? 0 : repository.getDirectory()
						.hashCode());
		result = prime * result + Arrays.hashCode(resources);
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		GitCompareEditorInput other = (GitCompareEditorInput) obj;
		if (baseVersion == null) {
			if (other.baseVersion != null)
				return false;
		} else if (!baseVersion.equals(other.baseVersion))
			return false;
		if (compareVersion == null) {
			if (other.compareVersion != null)
				return false;
		} else if (!compareVersion.equals(other.compareVersion))
			return false;
		if (repository == null) {
			if (other.repository != null)
				return false;
		} else if (other.repository == null
				|| !repository.getDirectory().equals(
						other.repository.getDirectory()))
			return false;
		if (!Arrays.equals(resources, other.resources))
			return false;
		return true;
	}

	/** Attaches diffNode under the (possibly created) parent for filePath. */
	private void add(IDiffContainer result, String filePath, DiffNode diffNode) {
		IDiffContainer container = getFileParent(result, filePath);
		container.add(diffNode);
		diffNode.setParent(container);
	}

	/**
	 * Finds (creating folder nodes as needed) the container for the parent
	 * folder of filePath. With selected resources, only paths under one of the
	 * selection roots get a parent; others yield null.
	 *
	 * NOTE(review): in the diffRoots branch the walk starts from root, not
	 * from entry.getValue() — the DiffNode values stored in diffRoots are
	 * never read here; verify whether that is intended.
	 */
	private IDiffContainer getFileParent(IDiffContainer root, String filePath) {
		IPath path = new Path(filePath);
		IDiffContainer child = root;
		if (diffRoots.isEmpty()) {
			for (int i = 0; i < path.segmentCount() - 1; i++)
				child = getOrCreateChild(child, path.segment(i));
			return child;
		} else {
			for (Entry<IPath, IDiffContainer> entry : diffRoots.entrySet()) {
				if (entry.getKey().isPrefixOf(path)) {
					for (int i = entry.getKey().segmentCount(); i < path
							.segmentCount() - 1; i++)
						child = getOrCreateChild(child, path.segment(i));
					return child;
				}
			}
			return null;
		}
	}

	/** Returns the existing child folder node with the given name, or creates it. */
	private DiffNode getOrCreateChild(IDiffContainer parent, final String name) {
		for (IDiffElement child : parent.getChildren()) {
			if (child.getName().equals(name)) {
				return ((DiffNode) child);
			}
		}
		// DiffNode(parent, ...) registers the new node with its parent
		DiffNode child = new DiffNode(parent, Differencer.NO_CHANGE) {
			@Override
			public String getName() {
				return name;
			}

			@Override
			public Image getImage() {
				return FOLDER_IMAGE;
			}
		};
		return child;
	}

	/**
	 * Reduces the input to the topmost resources only: any resource contained
	 * in another selected resource is dropped.
	 */
	private IResource[] convertResourceInput(final IResource[] input) {
		if (input.length > 0) {
			// we must make sure to only show the topmost resources as roots
			List<IResource> resourceList = new ArrayList<IResource>(
					input.length);
			List<IPath> allPaths = new ArrayList<IPath>(input.length);
			for (IResource originalInput : input) {
				allPaths.add(originalInput.getFullPath());
			}
			for (IResource originalInput : input) {
				boolean skip = false;
				for (IPath path : allPaths) {
					// a strict prefix means some other selection contains us
					if (path.isPrefixOf(originalInput.getFullPath())
							&& path.segmentCount() < originalInput
									.getFullPath().segmentCount()) {
						skip = true;
						break;
					}
				}
				if (!skip)
					resourceList.add(originalInput);
			}
			return resourceList.toArray(new IResource[resourceList.size()]);
		} else
			return input;
	}
}
package org.metaborg.meta.nabl2.solver;

import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;

import org.metaborg.meta.nabl2.constraints.IConstraint;
import org.metaborg.meta.nabl2.constraints.messages.IMessageContent;
import org.metaborg.meta.nabl2.constraints.messages.IMessageInfo;
import org.metaborg.meta.nabl2.constraints.messages.MessageContent;
import org.metaborg.meta.nabl2.terms.ITermVar;
import org.metaborg.meta.nabl2.unification.Unifier;
import org.metaborg.meta.nabl2.util.functions.Function1;
import org.metaborg.util.log.ILogger;
import org.metaborg.util.log.LoggerUtils;

import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Constraint solver that dispatches constraints to a set of
 * {@link ISolverComponent}s (equality, name binding, relations, sets,
 * symbolic, polymorphism) and iterates them to a fixed point. Use the static
 * {@link #solve} entry point; instances are not reusable.
 */
public class Solver {

    private static final ILogger logger = LoggerUtils.logger(Solver.class);

    private final Unifier unifier;
    // maps a constraint class to the component that handles it (see findComponent)
    private final Map<Class<? extends IConstraint>, ISolverComponent<?>> components;
    private final AstSolver astSolver;
    private final NamebindingSolver namebindingSolver;
    private final RelationSolver relationSolver;
    private final SymbolicSolver symSolver;
    // constraints no component accepted, plus leftovers reported by finish()
    private final Set<IConstraint> unsolved;
    // accumulated error/warning messages from unsatisfiable constraints
    private final List<IMessageInfo> messages;

    private Solver(SolverConfig config, Function1<String, ITermVar> fresh) {
        this.unifier = new Unifier();
        this.components = Maps.newHashMap();
        this.unsolved = Sets.newHashSet();
        // note: registration order matters only for iterate()'s round-robin;
        // field assignments are interleaved so later components can use earlier ones
        addComponent(new BaseSolver());
        addComponent(new EqualitySolver(unifier));
        addComponent(this.astSolver = new AstSolver(unifier));
        addComponent(this.namebindingSolver = new NamebindingSolver(config.getResolutionParams(), unifier));
        addComponent(this.relationSolver = new RelationSolver(config.getRelations(), config.getFunctions(), unifier));
        addComponent(new SetSolver(namebindingSolver.nameSets(), unifier));
        addComponent(this.symSolver = new SymbolicSolver());
        addComponent(new PolymorphismSolver(unifier, fresh));
        this.messages = Lists.newArrayList();
    }

    /** Registers a component under its declared constraint class. */
    private void addComponent(ISolverComponent<?> component) {
        components.put(component.getConstraintClass(), component);
    }

    /**
     * Finds the component for a constraint class: exact match first, then the
     * first registered component whose constraint class is a supertype (map
     * iteration order, so ambiguous hierarchies resolve arbitrarily).
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private Optional<ISolverComponent<IConstraint>> findComponent(Class<? extends IConstraint> constraintClass) {
        ISolverComponent component;
        if((component = components.get(constraintClass)) == null) {
            for(Entry<Class<? extends IConstraint>, ISolverComponent<?>> entry : components.entrySet()) {
                if(entry.getKey().isAssignableFrom(constraintClass)) {
                    component = entry.getValue();
                    break;
                }
            }
        }
        return Optional.ofNullable(component);
    }

    /**
     * Feeds each constraint to its component; constraints without a component
     * are collected in {@link #unsolved}. Unsatisfiable constraints are turned
     * into messages rather than aborting the run.
     */
    private void add(Iterable<IConstraint> constraints) throws InterruptedException {
        for(IConstraint constraint : constraints) {
            if(Thread.currentThread().isInterrupted()) {
                throw new InterruptedException();
            }
            Optional<ISolverComponent<IConstraint>> maybeComponent = findComponent(constraint.getClass());
            if(maybeComponent.isPresent()) {
                ISolverComponent<IConstraint> component = maybeComponent.get();
                component.getTimer().start();
                try {
                    component.add(constraint);
                } catch(UnsatisfiableException e) {
                    messages.addAll(e.getMessages());
                } finally {
                    component.getTimer().stop();
                }
            } else {
                unsolved.add(constraint);
            }
        }
    }

    /**
     * Round-robins the components until a full pass makes no progress
     * (fixed point). Any component reporting progress restarts the pass.
     */
    private void iterate() throws InterruptedException {
        outer: while(true) {
            if(Thread.currentThread().isInterrupted()) {
                throw new InterruptedException();
            }
            for(ISolverComponent<?> component : components.values()) {
                component.getTimer().start();
                try {
                    if(component.iterate()) {
                        continue outer;
                    }
                } catch(UnsatisfiableException e) {
                    messages.addAll(e.getMessages());
                    continue outer;
                } finally {
                    component.getTimer().stop();
                }
            }
            return;
        }
    }

    /**
     * Collects each component's leftover constraints into {@link #unsolved};
     * if errorsOnUnsolved, also emits an "Unsolved: ..." message per leftover.
     */
    private void finish(boolean errorsOnUnsolved) throws InterruptedException {
        for(ISolverComponent<?> component : components.values()) {
            if(Thread.currentThread().isInterrupted()) {
                throw new InterruptedException();
            }
            component.getTimer().start();
            try {
                unsolved.addAll(Lists.newArrayList(component.finish()));
            } finally {
                component.getTimer().stop();
            }
        }
        if(errorsOnUnsolved) {
            unsolved.stream().forEach(c -> {
                IMessageContent content = MessageContent.builder().append("Unsolved: ").append(c.pp()).build();
                messages.add(c.getMessageInfo().withDefault(content));
            });
        }
    }

    /**
     * Solves the given constraints and returns the resulting solution
     * (properties, scope graph, name resolution, relations, unifier, symbolic
     * constraints, and messages). Timing is logged at info level.
     *
     * @param config solver configuration (resolution params, relations, functions)
     * @param fresh supplier of fresh term variables, used by polymorphism solving
     * @param constraints constraints to solve
     * @throws InterruptedException if the solving thread is interrupted
     */
    public static Solution solve(SolverConfig config, Function1<String, ITermVar> fresh,
            Iterable<IConstraint> constraints) throws UnsatisfiableException, InterruptedException {
        final int n = Iterables.size(constraints);
        long t0 = System.nanoTime();
        logger.info(">>> Solving {} constraints <<<", n);
        Solver solver = new Solver(config, fresh);
        solver.add(constraints);
        solver.iterate();
        solver.finish(true);
        long dt = System.nanoTime() - t0;
        logger.info(">>> Solved {} constraints in {} seconds <<<", n, (Duration.ofNanos(dt).toMillis() / 1000.0));
        logger.info(" * namebinding : {} seconds <<<",
                (Duration.ofNanos(solver.namebindingSolver.getTimer().total()).toMillis() / 1000.0));
        logger.info(" * relations : {} seconds <<<",
                (Duration.ofNanos(solver.relationSolver.getTimer().total()).toMillis() / 1000.0));
        return ImmutableSolution.of(
            // @formatter:off
            solver.astSolver.getProperties(),
            solver.namebindingSolver.getScopeGraph(),
            solver.namebindingSolver.getNameResolution(),
            solver.namebindingSolver.getProperties(),
            solver.relationSolver.getRelations(),
            solver.unifier,
            solver.symSolver.get(),
            solver.messages
            // @formatter:on
        );
    }

}
package org.knopflerfish.bundle.junit;

import java.io.*;
import java.util.Dictionary;
import java.util.Hashtable;

import org.osgi.framework.*;
import org.osgi.util.tracker.*;

import org.knopflerfish.service.console.*;
import org.knopflerfish.service.junit.*;

import junit.framework.*;

/**
 * Console command group for listing and running JUnit tests registered as
 * OSGi services. Commands are dispatched reflectively by the console service
 * via the cmd&lt;Name&gt; / USAGE_&lt;NAME&gt; / HELP_&lt;NAME&gt; convention.
 */
public class JUnitCommandGroup extends CommandGroupAdapter {
  private BundleContext bc = null;

  // registration of this command group with the console; null when unregistered
  protected ServiceRegistration reg = null;

  // tracks the JUnitService used to build and run test suites
  protected ServiceTracker junitTracker;

  public JUnitCommandGroup(BundleContext bc) {
    super("junit", "JUnit test commands");
    this.bc = bc;

    // NOTE(review): the tracker uses Activator.bc while registration uses the
    // constructor argument; confirm both always refer to the same context.
    junitTracker = new ServiceTracker(Activator.bc,
                                      JUnitService.class.getName(),
                                      null);
    junitTracker.open();
  }

  /**
   * Returns the tracked JUnitService.
   *
   * @throws RuntimeException if no JUnitService is currently available
   */
  JUnitService getJUnitService() {
    JUnitService ju = (JUnitService)junitTracker.getService();
    if(ju == null) {
      throw new RuntimeException("No JUnitService available");
    }
    return ju;
  }

  /** Registers this command group with the console service (idempotent). */
  public void register() {
    if(reg == null) {
      Hashtable props = new Hashtable();
      props.put("groupName", getGroupName());
      reg = bc.registerService(CommandGroup.class.getName(),
                               this,
                               props);
    }
  }

  /** Unregisters this command group from the console service (idempotent). */
  void unregister() {
    if(reg != null) {
      reg.unregister();
      reg = null;
    }
  }

  public final static String USAGE_LIST = "";
  public final static String [] HELP_LIST = new String [] {
    "List available tests",
  };

  /**
   * Console command: lists all services registered as Test, TestSuite or
   * TestCase, printing their service.pid and optional description.
   */
  public int cmdList(Dictionary opts, Reader in, PrintWriter out,
                     Session session) {
    try {
      String filter =
        "(|" +
        "(objectclass=" + Test.class.getName() + ")" +
        "(objectclass=" + TestSuite.class.getName() + ")" +
        "(objectclass=" + TestCase.class.getName() + ")" +
        ")";
      ServiceReference[] srl = Activator.bc.getServiceReferences(null, filter);
      if(srl == null || srl.length == 0) {
        out.println("No Test services found");
      } else {
        out.println("Found " + srl.length + " tests");
      }
      for(int i = 0; srl != null && i < srl.length; i++) {
        Object obj = Activator.bc.getService(srl[i]);
        if(obj instanceof Test) {
          String id   = (String)srl[i].getProperty("service.pid");
          String desc = (String)srl[i].getProperty("service.description");
          out.print(" " + (i + 1) + ": " + id);
          if(desc != null && !"".equals(desc)) {
            out.print(" - " + desc);
          }
          out.println("");
        }
        // release the service reference acquired above
        Activator.bc.ungetService(srl[i]);
      }
    } catch (Exception e) {
      // console command: report the failure to the caller's console
      e.printStackTrace(out);
    }
    return 0;
  }

  public final static String USAGE_RUN = "[-out #file#] <id>";
  public final static String [] HELP_RUN = new String [] {
    "Run a test and dump XML results to a file or console.",
    " id          - service.pid of registered test",
    // fixed typo: "destionation" -> "destination"
    " -out #file# - optional file name of destination file.",
    "               If not set, print to console output.",
  };

  /**
   * Console command: runs the test with the given service.pid via the
   * JUnitService, writing results either to -out #file# or to the console.
   */
  public int cmdRun(Dictionary opts, Reader in, PrintWriter out,
                    Session session) {
    String id      = (String)opts.get("id");
    String subid   = (String)opts.get("-subid");
    String outName = (String)opts.get("-out");

    PrintWriter pw = out;
    if(outName != null) {
      try {
        File file = new File(outName);
        pw = new PrintWriter(new FileOutputStream(file));
      } catch (Exception e) {
        // fall back to console output; pw still points at out
        e.printStackTrace(out);
      }
    }
    try {
      TestSuite suite = getJUnitService().getTestSuite(id, subid);
      getJUnitService().runTest(pw, suite);
    } catch (Exception e) {
      e.printStackTrace(out);
    }
    if(pw != out) {
      try {
        pw.close();
      } catch (Exception ignored) {
        // best-effort close of the result file
      }
    }
    return 0;
  }

  /**
   * Wrap a PrintWriter into a PrintStream by overriding all methods.
   */
  public static class PrintWriterStream extends PrintStream {
    PrintWriter pw;
    boolean     bClose = false;

    /**
     * @param pw underlying writer to which all data is send to
     * @param bClose if <tt>true</tt> close the underlying writer
     *               when <tt>PrintWriterStream.close()</tt> is called.
     */
    public PrintWriterStream(PrintWriter pw, boolean bClose) {
      super(new ByteArrayOutputStream()); // This is really a dummy stream
      this.pw     = pw;
      this.bClose = bClose;
    }

    /**
     * Same as <tt>PrintWriterStream(pw, false)</tt>
     */
    public PrintWriterStream(PrintWriter pw) {
      this(pw, false);
    }

    /**
     * Only closes the underlying stream if
     * constructed with the close flag.
     */
    public void close() {
      super.close();
      if(bClose) {
        pw.close();
      }
    }

    /**
     * Writes each byte as an unsigned value. The previous version
     * sign-extended negative bytes (values >= 0x80), producing bogus
     * character codes when handed to PrintWriter.write(int); masking with
     * 0xff maps each byte to its Latin-1 character code instead.
     */
    public void write(byte[] buf, int off, int len) {
      for(int i = off; i < off + len; i++) {
        write(buf[i] & 0xff);
      }
    }

    public void write(int b) {
      pw.write(b);
    }

    public boolean checkError() {
      return pw.checkError();
    }

    public void flush() {
      pw.flush();
    }

    public void print(boolean b) {
      pw.print(b);
    }

    public void print(char c) {
      pw.print(c);
    }

    public void print(char[] s) {
      pw.print(s);
    }

    public void print(double d) {
      pw.print(d);
    }

    public void print(float f) {
      pw.print(f);
    }

    public void print(int i) {
      pw.print(i);
    }

    public void print(long l) {
      pw.print(l);
    }

    public void print(Object obj) {
      pw.print(obj);
    }

    public void print(String s) {
      pw.print(s);
    }

    public void println() {
      pw.println();
    }

    public void println(boolean x) {
      pw.println(x);
    }

    public void println(char x) {
      pw.println(x);
    }

    public void println(char[] x) {
      pw.println(x);
    }

    public void println(double x) {
      pw.println(x);
    }

    public void println(float x) {
      pw.println(x);
    }

    public void println(int x) {
      pw.println(x);
    }

    public void println(long x) {
      pw.println(x);
    }

    public void println(Object x) {
      pw.println(x);
    }

    public void println(String x) {
      pw.println(x);
    }
  }
}
package cz.metacentrum.perun.rpc.methods; import java.util.ArrayList; import java.util.List; import cz.metacentrum.perun.core.api.*; import cz.metacentrum.perun.core.api.exceptions.PerunException; import cz.metacentrum.perun.rpc.ApiCaller; import cz.metacentrum.perun.rpc.ManagerMethod; import cz.metacentrum.perun.rpc.RpcException; import cz.metacentrum.perun.rpc.deserializer.Deserializer; public enum AttributesManagerMethod implements ManagerMethod { /*# * Returns User-Facility attributes. * @param facility int Facility ID * @param user int User ID * @return List<Attribute> Attributes */ /*# * Returns Facility attributes. * @param facility int Facility ID * @return List<Attribute> Attributes */ /*# * Returns all VO attributes. * @param vo int VO ID * @return List<Attribute> Attributes */ /*# * Returns chosen VO attributes. * @param vo int VO ID * @param attrNames List<String> Attribute names * @return List<Attribute> Attributes */ /*# * Returns Member-Resource attributes. * @param member int Member ID * @param resource int Resource ID * @return List<Attribute> Attributes */ /*# * Returns Group-Resource attributes. * @param group int Group ID * @param resource int Resource ID * @return List<Attribute> Attributes */ /*# * Returns Resource attributes. * @param resource int Resource ID * @return List<Attribute> Attributes */ /*# * Returns all Member attributes. * @param member int Member ID * @return List<Attribute> Attributes */ /*# * Returns all Member attributes. * @param member int Member ID * @param workWithUserAttributes int Must = 1 * @return List<Attribute> Attributes */ /*# * Returns chosen Member attributes. * @param member int Member ID * @param attrNames List<String> Attribute names * @return List<Attribute> Attributes */ /*# * Returns all User attributes. * @param user int User ID * @return List<Attribute> Attributes */ /*# * Returns chosen User attributes. 
* @param user int User ID * @param attrNames List<String> Attribute names * @return List<Attribute> Attributes */ /*# * Returns Group attributes. * @param group int Group ID * @return List<Attribute> Attributes */ /*# * Returns Host attributes. * @param host int Host ID * @return List<Attribute> Attributes */ getAttributes { @Override public List<Attribute> call(ApiCaller ac, Deserializer parms) throws PerunException { if (parms.contains("facility")) { if(parms.contains("user")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user"))); } else { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility"))); } } else if (parms.contains("vo")) { if (parms.contains("attrNames[]")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.readList("attrNames", String.class)); } else { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getVoById(parms.readInt("vo"))); } } else if (parms.contains("resource")) { if (parms.contains("member")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member"))); } else if (parms.contains("group")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group"))); } else { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource"))); } } else if (parms.contains("member")) { if (parms.contains("workWithUserAttributes")){ if (parms.contains("attrNames[]")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readList("attrNames", String.class), parms.readInt("workWithUserAttributes") == 1); } else { return 
ac.getAttributesManager().getAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readInt("workWithUserAttributes") == 1); } } else if (parms.contains("attrNames[]")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readList("attrNames", String.class)); } else { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member"))); } } else if (parms.contains("user")) { if (parms.contains("attrNames[]")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.readList("attrNames", String.class)); } else { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getUserById(parms.readInt("user"))); } } else if (parms.contains("group")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getGroupById(parms.readInt("group"))); } else if (parms.contains("host")) { return ac.getAttributesManager().getAttributes(ac.getSession(), ac.getHostById(parms.readInt("host"))); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, resource, member, user, host or group"); } } }, /*# * Returns all entitiless attributes. * * @return List<Attribute> Attributes */ getEntitylessAttributes { @Override public List<Attribute> call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributesManager().getEntitylessAttributes(ac.getSession(), parms.readString("attrName")); } }, /*# * Sets the attributes. * * @param facility int Facility ID * @param user int User ID * @param member int Member ID * @param resoruce int Resource ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param facility int Facility ID * @param user int User ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. 
* * @param facility int Facility ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param vo int VO ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param member int Member ID * @param resoruce int Resource ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param group int Group ID * @param resoruce int Resource ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param resoruce int Resource ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param member int Member ID * @param workWithUserAttributes int Must = 1 * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param member int Member ID * @param attributes List<Attribute> List of attributes */ /*# * Sets the attributes. * * @param user int User ID * @param attributes List<Attribute> List of attribbutes */ /*# * Sets the attributes. * * @param group int Group ID * @param attributes List<Attribute> List of attribbutes */ /*# * Sets the attributes. 
* * @param host int Host ID * @param attributes List<Attribute> List of attribbutes */ setAttributes { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); if (parms.contains("facility")) { if(parms.contains("user")) { if (parms.contains("member") && parms.contains("resource")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getResourceById(parms.readInt("resource")), ac.getUserById(parms.readInt("user")), ac.getMemberById(parms.readInt("member")), parms.readList("attributes", Attribute.class)); } else { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user")), parms.readList("attributes", Attribute.class)); } } else { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), parms.readList("attributes", Attribute.class)); } } else if (parms.contains("vo")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.readList("attributes", Attribute.class)); } else if (parms.contains("resource")) { if (parms.contains("member")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readList("attributes", Attribute.class)); } else if (parms.contains("group")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group")), parms.readList("attributes", Attribute.class)); } else { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), parms.readList("attributes", Attribute.class)); } } else if (parms.contains("member")) { if(parms.contains("workWithUserAttributes")){ if(parms.readInt("workWithUserAttributes")!=1){ 
ac.getAttributesManager().setAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readList("attributes", Attribute.class), false); }else{ ac.getAttributesManager().setAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readList("attributes", Attribute.class), true); } }else{ ac.getAttributesManager().setAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readList("attributes", Attribute.class)); } } else if (parms.contains("user")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.readList("attributes", Attribute.class)); } else if (parms.contains("group")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getGroupById(parms.readInt("group")), parms.readList("attributes", Attribute.class)); } else if (parms.contains("host")) { ac.getAttributesManager().setAttributes(ac.getSession(), ac.getHostById(parms.readInt("host")), parms.readList("attributes", Attribute.class)); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, resource, member, user, host or group"); } return null; } }, /*# * Returns an Attribute by its ID. * * @param facility int Facility ID * @param user int User ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param facility int Facility ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param vo int VO ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param member int Member ID * @param resoruce int Resource ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. 
* * @param group int Group ID * @param resoruce int Resource ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param resoruce int Resource ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param member int Member ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param user int User ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param host int Host ID * @param attributeId int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param facility int Facility ID * @param user int User ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param facility int Facility ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param vo int VO ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param member int Member ID * @param resoruce int Resource ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param group int Group ID * @param resoruce int Resource ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param resoruce int Resource ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param member int Member ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. 
* * @param user int User ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ /*# * Returns an Attribute by its name. * * @param host int Host ID * @param attributeName String Attribute name * @return Attribute Found Attribute */ getAttribute { @Override public Attribute call(ApiCaller ac, Deserializer parms) throws PerunException { if(parms.contains("attributeId")) { if (parms.contains("facility")) { if (parms.contains("user")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user")), parms.readInt("attributeId")); } else { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), parms.readInt("attributeId")); } } else if (parms.contains("vo")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.readInt("attributeId")); } else if (parms.contains("resource")) { if (parms.contains("member")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readInt("attributeId")); }else if(parms.contains("group")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group")), parms.readInt("attributeId")); } else { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getResourceById(parms.readInt("resource")), parms.readInt("attributeId")); } } else if (parms.contains("member")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readInt("attributeId")); } else if (parms.contains("user")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.readInt("attributeId")); /* Not implemented yet } else 
if (parms.contains("group")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getGroupById(parms.readInt("group")), parms.readInt("attributeId")); */ } else if (parms.contains("host")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), ac.getHostById(parms.readInt("host")), parms.readInt("attributeId")); /* Not implemented yet } else if (parms.contains("key")) { return ac.getAttributesManager().getAttributeById(ac.getSession(), parms.readString("key"), parms.readInt("attributeId")); */ } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, resource, member, user, host, key or group"); } } else { if (parms.contains("facility")) { if (parms.contains("user")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user")), parms.readString("attributeName")); } else { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), parms.readString("attributeName")); } } else if (parms.contains("vo")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.readString("attributeName")); } else if (parms.contains("resource")) { if (parms.contains("member")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readString("attributeName")); }else if(parms.contains("group")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group")), parms.readString("attributeName")); } else { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getResourceById(parms.readInt("resource")), parms.readString("attributeName")); } } else if (parms.contains("member")) { return ac.getAttributesManager().getAttribute(ac.getSession(), 
ac.getMemberById(parms.readInt("member")), parms.readString("attributeName")); } else if (parms.contains("user")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.readString("attributeName")); } else if (parms.contains("group")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getGroupById(parms.readInt("group")), parms.readString("attributeName")); } else if (parms.contains("host")) { return ac.getAttributesManager().getAttribute(ac.getSession(), ac.getHostById(parms.readInt("host")), parms.readString("attributeName")); } else if (parms.contains("key")) { return ac.getAttributesManager().getAttribute(ac.getSession(), parms.readString("key"), parms.readString("attributeName")); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, resource, member, user, host, key or group"); } } } }, /*# * Returns AttributeDefinition. * * @param attributeName String Attribute name * @return AttributeDefinition Definition of an Attribute */ getAttributeDefinition { @Override public AttributeDefinition call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributesManager().getAttributeDefinition(ac.getSession(), parms.readString("attributeName")); } }, /*# * Returns all AttributeDefinitions. * * @return List<AttributeDefinition> Definitions of Attributes */ getAttributesDefinition { @Override public List<AttributeDefinition> call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributesManager().getAttributesDefinition(ac.getSession()); } }, /*# * Returns AttributeDefinition. * * @param id int Attribute ID * @return AttributeDefinition Definition of an Attribute */ getAttributeDefinitionById { @Override public AttributeDefinition call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributeDefinitionById(parms.readInt("id")); } }, /*# * Returns all AttributeDefinitions in a namespace. 
* * @return List<AttributeDefinition> Definitions of Attributes in a namespace */ getAttributesDefinitionByNamespace { @Override public List<AttributeDefinition> call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributesManager().getAttributesDefinitionByNamespace(ac.getSession(), parms.readString("namespace")); } }, /*# * Returns all AttributeDefinitions for every entity and possible combination of entities with rights * * @return List<AttributeDefinition> Definitions of Attributes for entities */ getAttributesDefinitionWithRights { @Override public List<AttributeDefinition> call(ApiCaller ac, Deserializer parms) throws PerunException { User user = null; Member member = null; Vo vo = null; Group group = null; Resource resource = null; Facility facility = null; Host host = null; //Not supported entityless attirbutes now //String entityless = null; List<PerunBean> entities = new ArrayList<PerunBean>(); //If member exists in query if(parms.contains("member")) { member = ac.getMemberById(parms.readInt("member")); entities.add(member); } //If user exists in query if(parms.contains("user")) { user = ac.getUserById(parms.readInt("user")); entities.add(user); } //If vo exists in query if(parms.contains("vo")) { vo = ac.getVoById(parms.readInt("vo")); entities.add(vo); } //If group exists in query if(parms.contains("group")) { group = ac.getGroupById(parms.readInt("group")); entities.add(group); } //If resource exists in query if(parms.contains("resource")) { resource = ac.getResourceById(parms.readInt("resource")); entities.add(resource); } //If facility exists in query if(parms.contains("facility")) { facility = ac.getFacilityById(parms.readInt("facility")); entities.add(facility); } //If host exists in query if(parms.contains("host")) { host = ac.getHostById(parms.readInt("host")); entities.add(host); } //If entityless exists in query /*if(parms.contains("entityless")) { }*/ List<AttributeDefinition> attributesDefinition = 
ac.getAttributesManager().getAttributesDefinitionWithRights(ac.getSession(), entities); return attributesDefinition; } }, /*# * Returns an Attribute by its ID. * * @param facility int Facility ID * @param id int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param vo int VO ID * @param id int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param member int Member ID * @param resoruce int Resource ID * @param id int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param resoruce int Resource ID * @param id int Attribute ID * @return Attribute Found Attribute */ /*# * Returns an Attribute by its ID. * * @param host int Host ID * @param id int Attribute ID * @return Attribute Found Attribute */ getAttributeById { @Override public Attribute call(ApiCaller ac, Deserializer parms) throws PerunException { if (parms.contains("facility")) { return ac.getAttributeById( ac.getFacilityById(parms.readInt("facility")), parms.readInt("id")); } else if (parms.contains("vo")) { return ac.getAttributeById( ac.getVoById(parms.readInt("vo")), parms.readInt("id")); } else if (parms.contains("resource")) { if (parms.contains("member")) { return ac.getAttributeById( ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readInt("id")); } else { return ac.getAttributeById( ac.getResourceById(parms.readInt("resource")), parms.readInt("id")); } } else if (parms.contains("host")) { return ac.getAttributeById( ac.getHostById(parms.readInt("host")), parms.readInt("id")); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, host or resource"); } } }, /*# * Sets an Attribute. * * @param facility int Facility ID * @param user int User ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. 
* * @param facility int Facility ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param vo int VO ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param member int Member ID * @param resoruce int Resource ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param group int Group ID * @param resoruce int Resource ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param resoruce int Resource ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param member int Member ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param user int User ID * @param attribute Attribute JSON object */ /*# * Sets an Attribute. * * @param group int Group ID * @param attribute Attribute JSON object * */ /*# * Sets an Attribute. * * @param host int Host ID * @param attribute Attribute JSON object */ setAttribute { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); if (parms.contains("facility")) { if(parms.contains("user")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user")), parms.read("attribute", Attribute.class)); } else { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), parms.read("attribute", Attribute.class)); } } else if (parms.contains("vo")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.read("attribute", Attribute.class)); } else if (parms.contains("resource")) { if (parms.contains("member")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.read("attribute", Attribute.class)); } else if(parms.contains("group")) { ac.getAttributesManager().setAttribute(ac.getSession(), 
ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group")), parms.read("attribute", Attribute.class)); } else { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getResourceById(parms.readInt("resource")), parms.read("attribute", Attribute.class)); } } else if (parms.contains("member")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.read("attribute", Attribute.class)); } else if (parms.contains("user")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.read("attribute", Attribute.class)); } else if (parms.contains("group")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getGroupById(parms.readInt("group")), parms.read("attribute", Attribute.class)); } else if (parms.contains("host")) { ac.getAttributesManager().setAttribute(ac.getSession(), ac.getHostById(parms.readInt("host")), parms.read("attribute", Attribute.class)); } else if (parms.contains("key")) { ac.getAttributesManager().setAttribute(ac.getSession(), parms.readString("key"), parms.read("attribute", Attribute.class)); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, resource, user, member, host, key or group"); } return null; } }, /*# * Creates AttributeDefinition * @param attribute AttributeDefinition JSON object * @return AttributeDefinition Created AttributeDefinition */ createAttribute { @Override public AttributeDefinition call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); AttributeDefinition attribute = parms.read("attribute", AttributeDefinition.class); return ac.getAttributesManager().createAttribute(ac.getSession(),attribute); } }, /*# * Deletes Attribute * @param attribute int Attribute ID */ deleteAttribute { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); 
ac.getAttributesManager().deleteAttribute(ac.getSession(), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } }, /*# * Returns required attributes. * * @param member int Member ID * @param service int Service ID * @param resource int Resource ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param group int Group ID * @param service int Service ID * @param resource int Resource ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param service int Service ID * @param resource int Resource ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param facility int Facility ID * @param service int Service ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param facility int Facility ID * @param services List<int> list of Service IDs * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param host int Host ID * @param service int Service ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param member int Member ID * @param resource int Resource ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param member int Member ID * @param resource int Resource ID * @param workWithUserAttributes int Must = 1 * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param resource int Resource ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param facility int Facility ID * @param user int User ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param facility int Facility ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param member int Member ID * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. 
* * @param member int Member ID * @param workWithUserAttributes int Must = 1 * @return List<Attribute> Required Attributes */ /*# * Returns required attributes. * * @param user int User ID * @return List<Attribute> Required Attributes */ getRequiredAttributes { @Override public List<Attribute> call(ApiCaller ac, Deserializer parms) throws PerunException { if (parms.contains("service")) { if (parms.contains("resource")) { if (parms.contains("member")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getServiceById(parms.readInt("service")), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member"))); } else if (parms.contains("group")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getServiceById(parms.readInt("service")), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group"))); } else { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getServiceById(parms.readInt("service")), ac.getResourceById(parms.readInt("resource"))); } } else if (parms.contains("facility")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getServiceById(parms.readInt("service")), ac.getFacilityById(parms.readInt("facility"))); } else if (parms.contains("host")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getServiceById(parms.readInt("service")), ac.getHostById(parms.readInt("host"))); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "host, resource or facility"); } } else if (parms.contains("services[]")) { // get list of services List<Service> services = new ArrayList<Service>(); List<Integer> servIds = parms.readList("services", Integer.class); for (Integer id : servIds) { Service s = ac.getServiceById(id); if (!services.contains(s)) { services.add(s); } } if (parms.contains("facility")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), services, 
ac.getFacilityById(parms.readInt("facility"))); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility"); } } else if (parms.contains("resource")) { if (parms.contains("member")) { if (parms.contains("workWithUserAttributes")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readInt("workWithUserAttributes") == 1); } else { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member"))); } } else { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource"))); } } else if (parms.contains("facility")) { if (parms.contains("user")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user"))); } else { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getFacilityById(parms.readInt("facility"))); } } else if (parms.contains("member")) { if (parms.contains("workWithUserAttributes")){ return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), parms.readInt("workWithUserAttributes") == 1); } else { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member")), false); } } else if (parms.contains("user")) { return ac.getAttributesManager().getRequiredAttributes(ac.getSession(), ac.getUserById(parms.readInt("user"))); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "service, resource, facility, member or user"); } } }, /*# * Returns requried attributes definition for a Service. 
* * @param service int Service ID * @return List<AttributeDefinition> Attributes definitions */ getRequiredAttributesDefinition { @Override public List<AttributeDefinition> call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributesManager().getRequiredAttributesDefinition(ac.getSession(), ac.getServiceById(parms.readInt("service"))); } }, getResourceRequiredAttributes { @Override public List<Attribute> call(ApiCaller ac, Deserializer parms) throws PerunException { if (parms.contains("resourceToGetServicesFrom")) { if (parms.contains("member")) { if (parms.contains("resource")) { if (parms.contains("workWithUserAttributes")){ return ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readInt("workWithUserAttributes") == 1); } else { return ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member"))); } } else { return ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getMemberById(parms.readInt("member"))); } } else if (parms.contains("user")) { if (parms.contains("facility")) { return ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getFacilityById(parms.readInt("facility")), ac.getUserById(parms.readInt("user"))); } else { return ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getUserById(parms.readInt("user"))); } } else if (parms.contains("group")) { if(parms.contains("resource")) { return 
ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group"))); } else { return ac.getAttributesManager().getResourceRequiredAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resourceToGetServicesFrom")), ac.getGroupById(parms.readInt("group"))); } } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "member or user"); } } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "resourceToGetServicesFrom"); } } }, fillAttribute { @Override public Attribute call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); if (parms.contains("group")) { Group group = ac.getGroupById(parms.readInt("group")); return ac.getAttributesManager().fillAttribute(ac.getSession(), group, ac.getAttributeById(group, parms.readInt("attribute"))); } else if (parms.contains("host")) { Host host = ac.getHostById(parms.readInt("host")); return ac.getAttributesManager().fillAttribute(ac.getSession(), host, ac.getAttributeById(host, parms.readInt("attribute"))); } else if (parms.contains("resource")) { Resource resource = ac.getResourceById(parms.readInt("resource")); if (parms.contains("group")) { Group group = ac.getGroupById(parms.readInt("group")); return ac.getAttributesManager().fillAttribute(ac.getSession(), resource, group, ac.getAttributeById(resource, group, parms.readInt("attribute"))); } else if (parms.contains("member")) { Member member = ac.getMemberById(parms.readInt("member")); return ac.getAttributesManager().fillAttribute(ac.getSession(), resource, member, ac.getAttributeById(resource, member, parms.readInt("attribute"))); } else { return ac.getAttributesManager().fillAttribute(ac.getSession(), resource, ac.getAttributeById(resource, parms.readInt("attribute"))); } } else if (parms.contains("user")) { User user = ac.getUserById(parms.readInt("user")); if 
(parms.contains("facility")) { Facility facility = ac.getFacilityById(parms.readInt("facility")); return ac.getAttributesManager().fillAttribute(ac.getSession(), facility, user, ac.getAttributeById(facility, user, parms.readInt("attribute"))); } else { return ac.getAttributesManager().fillAttribute(ac.getSession(), user, ac.getAttributeById(user, parms.readInt("attribute"))); } } else if (parms.contains("member")) { Member member = ac.getMemberById(parms.readInt("member")); return ac.getAttributesManager().fillAttribute(ac.getSession(), member, ac.getAttributeById(member, parms.readInt("attribute"))); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "group, host, resoure, user, member"); } } }, fillAttributes { @Override public List<Attribute> call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); List<Attribute> attributes = new ArrayList<Attribute>(); if(parms.contains("attributes")) { attributes = parms.readList("attributes", Attribute.class); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "attributes"); } if (parms.contains("host")) { Host host = ac.getHostById(parms.readInt("host")); return ac.getAttributesManager().fillAttributes(ac.getSession(), host, attributes); } else if (parms.contains("resource")) { Resource resource = ac.getResourceById(parms.readInt("resource")); if (parms.contains("group")) { Group group = ac.getGroupById(parms.readInt("group")); return ac.getAttributesManager().fillAttributes(ac.getSession(), resource, group, attributes); } else if (parms.contains("user")) { User user = ac.getUserById(parms.readInt("user")); if (parms.contains("facility") && parms.contains("member")) { Facility facility = ac.getFacilityById(parms.readInt("facility")); Member member = ac.getMemberById(parms.readInt("member")); return ac.getAttributesManager().fillAttributes(ac.getSession(), facility, resource, user, member, attributes); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, 
"facility, member"); } } else if (parms.contains("member")) { Member member = ac.getMemberById(parms.readInt("member")); if (parms.contains("workWithUserAttributes")) { if(parms.readInt("workWithUserAttributes") != 1) { return ac.getAttributesManager().fillAttributes(ac.getSession(), resource, member, attributes, false); } else { return ac.getAttributesManager().fillAttributes(ac.getSession(), resource, member, attributes, true); } } else { return ac.getAttributesManager().fillAttributes(ac.getSession(), resource, member, attributes); } } else { return ac.getAttributesManager().fillAttributes(ac.getSession(), resource, attributes); } } else if (parms.contains("group")) { Group group = ac.getGroupById(parms.readInt("group")); return ac.getAttributesManager().fillAttributes(ac.getSession(), group, attributes); } else if (parms.contains("user")) { User user = ac.getUserById(parms.readInt("user")); if (parms.contains("facility")) { Facility facility = ac.getFacilityById(parms.readInt("facility")); return ac.getAttributesManager().fillAttributes(ac.getSession(), facility, user, attributes); } else { return ac.getAttributesManager().fillAttributes(ac.getSession(), user, attributes); } } else if (parms.contains("member")) { Member member = ac.getMemberById(parms.readInt("member")); return ac.getAttributesManager().fillAttributes(ac.getSession(), member, attributes); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "group, host, resoure, user, member"); } } }, checkAttributeValue { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { if (parms.contains("facility")) { ac.getAttributesManager().checkAttributeValue(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), parms.read("attribute", Attribute.class)); } else if (parms.contains("vo")) { ac.getAttributesManager().checkAttributeValue(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.read("attribute", Attribute.class)); } else if 
(parms.contains("resource")) { if (parms.contains("member")) { ac.getAttributesManager().checkAttributeValue(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.read("attribute", Attribute.class)); } else { ac.getAttributesManager().checkAttributeValue(ac.getSession(), ac.getResourceById(parms.readInt("resource")), parms.read("attribute", Attribute.class)); } } else if (parms.contains("user")) { ac.getAttributesManager().checkAttributeValue(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.read("attribute", Attribute.class)); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "resource, vo, facility, member or user"); } return null; } }, checkAttributesValue { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { if (parms.contains("facility")) { ac.getAttributesManager().checkAttributesValue(ac.getSession(), ac.getFacilityById(parms.readInt("facility")), parms.readList("attributes", Attribute.class)); return null; } else if (parms.contains("vo")) { ac.getAttributesManager().checkAttributesValue(ac.getSession(), ac.getVoById(parms.readInt("vo")), parms.readList("attributes", Attribute.class)); return null; } else if (parms.contains("resource")) { if (parms.contains("member")) { ac.getAttributesManager().checkAttributesValue(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member")), parms.readList("attributes", Attribute.class)); return null; } else { ac.getAttributesManager().checkAttributesValue(ac.getSession(), ac.getResourceById(parms.readInt("resource")), parms.readList("attributes", Attribute.class)); return null; } } else if (parms.contains("host")) { ac.getAttributesManager().checkAttributesValue(ac.getSession(), ac.getHostById(parms.readInt("host")), parms.readList("attributes", Attribute.class)); return null; } else if (parms.contains("user")) { 
ac.getAttributesManager().checkAttributesValue(ac.getSession(), ac.getUserById(parms.readInt("user")), parms.readList("attributes", Attribute.class)); return null; } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "resource, vo, host or facility"); } } }, removeAttributes { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); int[] ids = parms.readArrayOfInts("attributes"); List<AttributeDefinition> attributes = new ArrayList<AttributeDefinition>(ids.length); for(int i : ids) { attributes.add(ac.getAttributeDefinitionById(i)); } if (parms.contains("facility")){ if(parms.contains("resource") && parms.contains("user") && parms.contains("member")) { Facility facility = ac.getFacilityById(parms.readInt("facility")); Member member = ac.getMemberById(parms.readInt("member")); User user = ac.getUserById(parms.readInt("user")); Resource resource = ac.getResourceById(parms.readInt("resource")); ac.getAttributesManager().removeAttributes(ac.getSession(),facility, resource, user, member, attributes); } else { Facility facility = ac.getFacilityById(parms.readInt("facility")); ac.getAttributesManager().removeAttributes(ac.getSession(), facility, attributes); } } else if (parms.contains("vo")) { Vo vo = ac.getVoById(parms.readInt("vo")); ac.getAttributesManager().removeAttributes(ac.getSession(), vo, attributes); } else if (parms.contains("resource")) { Resource resource = ac.getResourceById(parms.readInt("resource")); if (parms.contains("member")) { Member member = ac.getMemberById(parms.readInt("member")); ac.getAttributesManager().removeAttributes(ac.getSession(), resource, member, attributes); } else if (parms.contains("group")) { Group group = ac.getGroupById(parms.readInt("group")); ac.getAttributesManager().removeAttributes(ac.getSession(), resource, group, attributes); } else { ac.getAttributesManager().removeAttributes(ac.getSession(), resource, attributes); } } else if (parms.contains("group")) { 
Group group = ac.getGroupById(parms.readInt("group")); ac.getAttributesManager().removeAttributes(ac.getSession(), group, attributes); } else if (parms.contains("host")) { Host host = ac.getHostById(parms.readInt("host")); ac.getAttributesManager().removeAttributes(ac.getSession(), host, attributes); } else if (parms.contains("member")) { if (parms.contains("workWithUserAttributes")) { Member member = ac.getMemberById(parms.readInt("member")); if(parms.readInt("workWithUserAttributes") != 1) { ac.getAttributesManager().removeAttributes(ac.getSession(), member, false, attributes); } else { ac.getAttributesManager().removeAttributes(ac.getSession(), member, true, attributes); } } else { Member member = ac.getMemberById(parms.readInt("member")); ac.getAttributesManager().removeAttributes(ac.getSession(), member, attributes); } } else if (parms.contains("user")) { User user = ac.getUserById(parms.readInt("user")); ac.getAttributesManager().removeAttributes(ac.getSession(), user, attributes); } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, group, host, resource, member or user"); } return null; } }, removeAttribute { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); if (parms.contains("facility")) { Facility facility = ac.getFacilityById(parms.readInt("facility")); if (parms.contains("user")) { ac.getAttributesManager().removeAttribute(ac.getSession(), facility, ac.getUserById(parms.readInt("user")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); } else { ac.getAttributesManager().removeAttribute(ac.getSession(), facility, ac.getAttributeDefinitionById(parms.readInt("attribute"))); } return null; } else if (parms.contains("vo")) { Vo vo = ac.getVoById(parms.readInt("vo")); ac.getAttributesManager().removeAttribute(ac.getSession(), vo, ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else if (parms.contains("resource")) { if 
(parms.contains("member")) { Resource resource = ac.getResourceById(parms.readInt("resource")); ac.getAttributesManager().removeAttribute(ac.getSession(), resource, ac.getMemberById(parms.readInt("member")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else if(parms.contains("group")) { ac.getAttributesManager().removeAttribute(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else { Resource resource = ac.getResourceById(parms.readInt("resource")); ac.getAttributesManager().removeAttribute(ac.getSession(), resource, ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } } else if (parms.contains("member")) { ac.getAttributesManager().removeAttribute(ac.getSession(), ac.getMemberById(parms.readInt("member")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else if (parms.contains("user")) { ac.getAttributesManager().removeAttribute(ac.getSession(), ac.getUserById(parms.readInt("user")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else if (parms.contains("group")) { ac.getAttributesManager().removeAttribute(ac.getSession(), ac.getGroupById(parms.readInt("group")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else if (parms.contains("host")) { ac.getAttributesManager().removeAttribute(ac.getSession(), ac.getHostById(parms.readInt("host")), ac.getAttributeDefinitionById(parms.readInt("attribute"))); return null; } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, vo, group, resource, member, host or user"); } } }, removeAllAttributes { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); if (parms.contains("facility")) { Facility facility = ac.getFacilityById(parms.readInt("facility")); if (parms.contains("user")) { 
ac.getAttributesManager().removeAllAttributes(ac.getSession(), facility, ac.getUserById(parms.readInt("user"))); } else if (parms.contains("removeAlsoUserFacilityAttributes")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), facility, parms.readInt("workWithUserAttributes") == 1); } else { ac.getAttributesManager().removeAllAttributes(ac.getSession(), facility); } return null; } else if (parms.contains("vo")) { Vo vo = ac.getVoById(parms.readInt("vo")); ac.getAttributesManager().removeAllAttributes(ac.getSession(), vo); return null; } else if (parms.contains("resource")) { if (parms.contains("member")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getMemberById(parms.readInt("member"))); return null; }else if(parms.contains("group")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource")), ac.getGroupById(parms.readInt("group"))); return null; } else { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getResourceById(parms.readInt("resource"))); return null; } } else if (parms.contains("member")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getMemberById(parms.readInt("member"))); return null; } else if (parms.contains("user")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getUserById(parms.readInt("user"))); return null; } else if (parms.contains("group")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getGroupById(parms.readInt("group"))); return null; } else if (parms.contains("host")) { ac.getAttributesManager().removeAllAttributes(ac.getSession(), ac.getHostById(parms.readInt("host"))); return null; } else { throw new RpcException(RpcException.Type.MISSING_VALUE, "facility, resource, vo, group, member, host or user"); } } }, getLogins { @Override public List<Attribute> call(ApiCaller ac, Deserializer parms) throws PerunException { return 
ac.getAttributesManager().getLogins(ac.getSession(), ac.getUserById(parms.readInt("user"))); } }, updateAttributeDefinition { @Override public AttributeDefinition call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); return ac.getAttributesManager().updateAttributeDefinition(ac.getSession(), parms.read("attributeDefinition", AttributeDefinition.class)); } }, doTheMagic { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.getAttributesManager().doTheMagic(ac.getSession(), ac.getMemberById(parms.readInt("member"))); return null; } }, /*# * Gets attribute rights of an attribute with id given as a parametr. * If the attribute has no rights for a role, it returns empty list. * * @param attributeId id of the attribute * @return all rights of the attribute */ getAttributeRights { @Override public List<AttributeRights> call(ApiCaller ac, Deserializer parms) throws PerunException { return ac.getAttributesManager().getAttributeRights(ac.getSession(), parms.readInt("attributeId")); } }, /*# * Sets all attribute rights in the list given as a parametr. * * @param rights list of attribute rights */ setAttributeRights { @Override public Void call(ApiCaller ac, Deserializer parms) throws PerunException { ac.stateChangingCheck(); ac.getAttributesManager().setAttributeRights(ac.getSession(), parms.readList("rights", AttributeRights.class)); return null; } }; }
/* Start of a second concatenated source file: IntelliJ's local-inspections highlighting
 * pass. It runs the enabled LocalInspectionTools over a file range and turns the
 * resulting ProblemDescriptors into editor HighlightInfos. */
package com.intellij.codeInsight.daemon.impl; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeHighlighting.Pass; import com.intellij.codeInsight.daemon.DaemonBundle; import com.intellij.codeInsight.daemon.HighlightDisplayKey; import com.intellij.codeInsight.daemon.impl.analysis.DaemonTooltipsUtil; import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager; import com.intellij.codeInsight.daemon.impl.quickfix.QuickFixAction; import com.intellij.codeInsight.intention.EmptyIntentionAction; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.codeInspection.*; import com.intellij.codeInspection.ex.*; import com.intellij.codeInspection.ui.InspectionToolPresentation; import com.intellij.concurrency.JobLauncher; import com.intellij.diagnostic.PluginException; import com.intellij.injected.editor.DocumentWindow; import com.intellij.lang.Language; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.lang.annotation.ProblemGroup; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapManager; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.util.NlsContexts; import com.intellij.openapi.util.NlsSafe; import com.intellij.openapi.util.Pair; import
/* Remaining imports, then the class declaration and its state: per-file inspection
 * results in a ConcurrentHashMap (written from worker threads), the priority range, the
 * resolved inspection profile + severity registrar, the "show error description"
 * shortcut text used in tooltips, and per-tool suppressed-element bookkeeping. */
com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.profile.codeInspection.ProjectInspectionProfileManager; import com.intellij.psi.*; import com.intellij.util.*; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.Interner; import com.intellij.util.containers.SmartHashSet; import com.intellij.xml.util.XmlStringUtil; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.Function; import java.util.function.Predicate; import static com.intellij.codeInspection.ex.InspectListener.InspectionKind.LOCAL; import static com.intellij.codeInspection.ex.InspectListener.InspectionKind.LOCAL_PRIORITY; import static com.intellij.codeInspection.ex.InspectionEventsKt.reportWhenInspectionFinished; public class LocalInspectionsPass extends ProgressableTextEditorHighlightingPass { private static final Logger LOG = Logger.getInstance(LocalInspectionsPass.class); public static final TextRange EMPTY_PRIORITY_RANGE = TextRange.EMPTY_RANGE; private static final Predicate<PsiFile> SHOULD_INSPECT_FILTER = file -> HighlightingLevelManager.getInstance(file.getProject()).shouldInspect(file); private final TextRange myPriorityRange; private final boolean myIgnoreSuppressed; private final ConcurrentMap<PsiFile, List<InspectionResult>> result = new ConcurrentHashMap<>(); private final InspectListener myInspectTopicPublisher; private volatile List<HighlightInfo> myInfos = Collections.emptyList(); private final String myShortcutText; private final SeverityRegistrar mySeverityRegistrar; private final InspectionProfileWrapper myProfileWrapper; private final Map<String, Set<PsiElement>> mySuppressedElements = new ConcurrentHashMap<>(); private final boolean myInspectInjectedPsi; public
/* Constructor: requires a physical file; captures the restricted offset range and
 * priority range, resolves the active inspection profile (honoring a per-file
 * InspectionProfileWrapper.CUSTOMIZATION_KEY override), and precomputes the keyboard
 * shortcut text for "show error description" tooltips. Progress limit 300*2 is an
 * initial guess, refined later in inspect(). */
LocalInspectionsPass(@NotNull PsiFile file, @NotNull Document document, int startOffset, int endOffset, @NotNull TextRange priorityRange, boolean ignoreSuppressed, @NotNull HighlightInfoProcessor highlightInfoProcessor, boolean inspectInjectedPsi) { super(file.getProject(), document, getPresentableNameText(), file, null, new TextRange(startOffset, endOffset), true, highlightInfoProcessor); assert file.isPhysical() : "can't inspect non-physical file: " + file + "; " + file.getVirtualFile(); myPriorityRange = priorityRange; myIgnoreSuppressed = ignoreSuppressed; setId(Pass.LOCAL_INSPECTIONS); final KeymapManager keymapManager = KeymapManager.getInstance(); if (keymapManager != null) { final Keymap keymap = keymapManager.getActiveKeymap(); myShortcutText = "(" + KeymapUtil.getShortcutsText(keymap.getShortcuts(IdeActions.ACTION_SHOW_ERROR_DESCRIPTION)) + ")"; } else { myShortcutText = ""; } InspectionProfileImpl profileToUse = ProjectInspectionProfileManager.getInstance(myProject).getCurrentProfile(); Function<InspectionProfileImpl, InspectionProfileWrapper> custom = file.getUserData(InspectionProfileWrapper.CUSTOMIZATION_KEY); myProfileWrapper = custom == null ?
new InspectionProfileWrapper(profileToUse) : custom.apply(profileToUse); assert myProfileWrapper != null; mySeverityRegistrar = myProfileWrapper.getProfileManager().getSeverityRegistrar(); myInspectInjectedPsi = inspectInjectedPsi; myInspectTopicPublisher = myProject.getMessageBus().syncPublisher(GlobalInspectionContextEx.INSPECT_TOPIC); // initial guess setProgressLimit(300 * 2); } private @NotNull PsiFile getFile() { return myFile; } @Override protected void collectInformationWithProgress(@NotNull ProgressIndicator progress) { try { if (!HighlightingLevelManager.getInstance(myProject).shouldInspect(getFile())) { return; } inspect(getInspectionTools(myProfileWrapper), InspectionManager.getInstance(myProject), true, progress); } finally { disposeDescriptors(); } } private void disposeDescriptors() { result.clear(); } private static final Set<String> ourToolsWithInformationProblems = new HashSet<>(); public void doInspectInBatch(final @NotNull GlobalInspectionContextImpl context, final @NotNull InspectionManager iManager, final @NotNull List<? extends LocalInspectionToolWrapper> toolWrappers) { final ProgressIndicator progress = ProgressManager.getInstance().getProgressIndicator(); inspect(new ArrayList<>(toolWrappers), iManager, false, progress); addDescriptorsFromInjectedResults(context); List<InspectionResult> resultList = result.get(getFile()); if (resultList == null) return; for (InspectionResult inspectionResult : resultList) { LocalInspectionToolWrapper toolWrapper = inspectionResult.tool; final String shortName = toolWrapper.getShortName(); for (ProblemDescriptor descriptor : inspectionResult.foundProblems) { if (descriptor.getHighlightType() == ProblemHighlightType.INFORMATION) { if (ourToolsWithInformationProblems.add(shortName)) { String message = "Tool #" + shortName + " registers INFORMATION level problem in batch mode on " + getFile() + ". " + "INFORMATION level 'warnings' are invisible in the editor and should not become visible in batch mode. 
" + "Moreover, cause INFORMATION level fixes act more like intention actions, they could e.g. change semantics and " + "thus should not be suggested for batch transformations"; LocalInspectionEP extension = toolWrapper.getExtension(); if (extension != null) { LOG.error(new PluginException(message, extension.getPluginDescriptor().getPluginId())); } else { LOG.error(message); } } continue; } addDescriptors(toolWrapper, descriptor, context); } } } private void addDescriptors(@NotNull LocalInspectionToolWrapper toolWrapper, @NotNull ProblemDescriptor descriptor, @NotNull GlobalInspectionContextImpl context) { InspectionToolPresentation toolPresentation = context.getPresentation(toolWrapper); BatchModeDescriptorsUtil.addProblemDescriptors(Collections.singletonList(descriptor), toolPresentation, myIgnoreSuppressed, context, toolWrapper.getTool()); } private void addDescriptorsFromInjectedResults(@NotNull GlobalInspectionContextImpl context) { for (Map.Entry<PsiFile, List<InspectionResult>> entry : result.entrySet()) { PsiFile file = entry.getKey(); if (file == getFile()) continue; // not injected List<InspectionResult> resultList = entry.getValue(); for (InspectionResult inspectionResult : resultList) { LocalInspectionToolWrapper toolWrapper = inspectionResult.tool; for (ProblemDescriptor descriptor : inspectionResult.foundProblems) { PsiElement psiElement = descriptor.getPsiElement(); if (psiElement == null) continue; if (toolWrapper.getTool().isSuppressedFor(psiElement)) continue; addDescriptors(toolWrapper, descriptor, context); } } } } private void inspect(@NotNull List<?
/* inspect(): Divider splits the file's elements into inside/outside of the priority
 * (visible) range; each applicable tool is run over the priority elements first, then
 * the rest, with injected-PSI fragments inspected for both partitions; finally the
 * accumulated results are converted into HighlightInfos (myInfos), and on-the-fly runs
 * also highlight redundant @SuppressWarnings-style suppressions. */
extends LocalInspectionToolWrapper> toolWrappers, final @NotNull InspectionManager iManager, final boolean isOnTheFly, final @NotNull ProgressIndicator progress) { if (toolWrappers.isEmpty()) return; List<Divider.DividedElements> allDivided = new ArrayList<>(); Divider.divideInsideAndOutsideAllRoots(myFile, myRestrictRange, myPriorityRange, SHOULD_INSPECT_FILTER, new CommonProcessors.CollectProcessor<>(allDivided)); List<PsiElement> inside = ContainerUtil.concat((List<List<PsiElement>>)ContainerUtil.map(allDivided, d -> d.inside)); List<PsiElement> outside = ContainerUtil.concat((List<List<PsiElement>>)ContainerUtil.map(allDivided, d -> ContainerUtil.concat(d.outside, d.parents))); setProgressLimit(toolWrappers.size() * 2L); final LocalInspectionToolSession session = new LocalInspectionToolSession(getFile(), myRestrictRange.getStartOffset(), myRestrictRange.getEndOffset()); List<InspectionContext> init = visitPriorityElementsAndInit( InspectionEngine.filterToolsApplicableByLanguage(toolWrappers, InspectionEngine.calcElementDialectIds(inside, outside)), iManager, isOnTheFly, progress, inside, session); Set<PsiFile> alreadyVisitedInjected = inspectInjectedPsi(inside, isOnTheFly, progress, iManager, true, toolWrappers, Collections.emptySet()); visitRestElementsAndCleanup(progress, outside, session, init, isOnTheFly); inspectInjectedPsi(outside, isOnTheFly, progress, iManager, false, toolWrappers, alreadyVisitedInjected); ProgressManager.checkCanceled(); myInfos = new ArrayList<>(); addHighlightsFromResults(myInfos); if (isOnTheFly) { highlightRedundantSuppressions(toolWrappers, iManager, inside, outside); } } private void highlightRedundantSuppressions(@NotNull List<? extends LocalInspectionToolWrapper> toolWrappers, @NotNull InspectionManager iManager, @NotNull List<? extends PsiElement> inside, @NotNull List<?
/* highlightRedundantSuppressions (continued): if the RedundantSuppress tool is enabled
 * and the file's language has a RedundantSuppressionDetector, collects the IDs of all
 * tools that actually ran (skipping unfair/inapplicable/global ones, and bailing out
 * entirely if any tool runs whole-file) and runs the detector's local tool over the
 * same inside/outside partitions, reporting suppressions that no longer suppress
 * anything. */
extends PsiElement> outside) { HighlightDisplayKey key = HighlightDisplayKey.find(RedundantSuppressInspection.SHORT_NAME); final InspectionProfile inspectionProfile = myProfileWrapper.getInspectionProfile(); if (key != null && inspectionProfile.isToolEnabled(key, getFile())) { InspectionToolWrapper<?,?> toolWrapper = inspectionProfile.getInspectionTool(RedundantSuppressInspection.SHORT_NAME, getFile()); Language fileLanguage = getFile().getLanguage(); InspectionSuppressor suppressor = LanguageInspectionSuppressors.INSTANCE.forLanguage(fileLanguage); if (suppressor instanceof RedundantSuppressionDetector) { if (toolWrappers.stream().anyMatch(LocalInspectionToolWrapper::runForWholeFile)) { return; } Set<String> activeTools = new HashSet<>(); for (LocalInspectionToolWrapper tool : toolWrappers) { if (tool.isUnfair() || !tool.isApplicable(fileLanguage) || myProfileWrapper.getInspectionTool(tool.getShortName(), myFile) instanceof GlobalInspectionToolWrapper) { continue; } activeTools.add(tool.getID()); ContainerUtil.addIfNotNull(activeTools, tool.getAlternativeID()); InspectionElementsMerger elementsMerger = InspectionElementsMerger.getMerger(tool.getShortName()); if (elementsMerger != null) { activeTools.addAll(Arrays.asList(elementsMerger.getSuppressIds())); } } LocalInspectionTool localTool = ((RedundantSuppressInspection)toolWrapper.getTool()).createLocalTool((RedundantSuppressionDetector)suppressor, mySuppressedElements, activeTools); ProblemsHolder holder = new ProblemsHolder(iManager, getFile(), true); PsiElementVisitor visitor = localTool.buildVisitor(holder, true); InspectionEngine.acceptElements(inside, visitor); InspectionEngine.acceptElements(outside, visitor); HighlightSeverity severity = myProfileWrapper.getErrorLevel(key, getFile()).getSeverity(); for (ProblemDescriptor descriptor : holder.getResults()) { ProgressManager.checkCanceled(); PsiElement element = descriptor.getPsiElement(); if (element != null) { Document thisDocument =
/* visitPriorityElementsAndInit: runs every tool over the priority (visible) elements
 * concurrently via JobLauncher, collecting one InspectionContext per tool that returned
 * a non-empty visitor; batch-mode runs are wrapped in reportWhenInspectionFinished for
 * the INSPECT_TOPIC publisher. Throws ProcessCanceledException if the concurrent run is
 * interrupted. */
documentManager.getDocument(getFile()); createHighlightsForDescriptor(myInfos, emptyActionRegistered, ilManager, getFile(), thisDocument, new LocalInspectionToolWrapper(localTool), severity, descriptor, element, false); } } } } } private @NotNull List<InspectionContext> visitPriorityElementsAndInit(@NotNull List<? extends LocalInspectionToolWrapper> wrappers, final @NotNull InspectionManager iManager, final boolean isOnTheFly, final @NotNull ProgressIndicator indicator, final @NotNull List<? extends PsiElement> elements, final @NotNull LocalInspectionToolSession session) { final List<InspectionContext> init = new ArrayList<>(); PsiFile file = session.getFile(); Processor<LocalInspectionToolWrapper> processor = toolWrapper -> AstLoadingFilter.disallowTreeLoading(() -> AstLoadingFilter.<Boolean, RuntimeException>forceAllowTreeLoading(file, () -> { if (elements.isEmpty() || isOnTheFly) { runToolOnElements(toolWrapper, iManager, isOnTheFly, indicator, elements, session, init); } else { reportWhenInspectionFinished( myInspectTopicPublisher, toolWrapper, LOCAL_PRIORITY, () -> { runToolOnElements(toolWrapper, iManager, false, indicator, elements, session, init); }); } return true; })); if (!JobLauncher.getInstance().invokeConcurrentlyUnderProgress(wrappers, indicator, processor)) { throw new ProcessCanceledException(); } return init; } private void runToolOnElements(final @NotNull LocalInspectionToolWrapper toolWrapper, final @NotNull InspectionManager iManager, final boolean isOnTheFly, final @NotNull ProgressIndicator indicator, final @NotNull List<? extends PsiElement> elements, final @NotNull LocalInspectionToolSession session, @NotNull List<?
/* runToolOnElements (continued): the ProblemsHolder subclass forwards each registered
 * problem to addDescriptorIncrementally while applyIncrementally[0] is true (on-the-fly,
 * visible range only); tools returning EMPTY_VISITOR are skipped for the second phase.
 * visitRestElementsAndCleanup then feeds the remaining (outside) elements to each saved
 * visitor, calls inspectionFinished, and appends only the problems added after the
 * recorded problemsSize watermark. */
super InspectionContext> init) { ProgressManager.checkCanceled(); ApplicationManager.getApplication().assertReadAccessAllowed(); final LocalInspectionTool tool = toolWrapper.getTool(); final boolean[] applyIncrementally = {isOnTheFly}; ProblemsHolder holder = new ProblemsHolder(iManager, getFile(), isOnTheFly) { @Override public void registerProblem(@NotNull ProblemDescriptor descriptor) { super.registerProblem(descriptor); if (applyIncrementally[0]) { addDescriptorIncrementally(descriptor, toolWrapper, indicator); } } }; PsiElementVisitor visitor = InspectionEngine.createVisitorAndAcceptElements(tool, holder, isOnTheFly, session, elements); // if inspection returned empty visitor then it should be skipped if (visitor != PsiElementVisitor.EMPTY_VISITOR) { synchronized (init) { init.add(new InspectionContext(toolWrapper, holder, holder.getResultCount(), visitor)); } } advanceProgress(1); if (holder.hasResults()) { appendDescriptors(getFile(), holder.getResults(), toolWrapper); } applyIncrementally[0] = false; // do not apply incrementally outside visible range } private void visitRestElementsAndCleanup(final @NotNull ProgressIndicator indicator, final @NotNull List<? extends PsiElement> elements, final @NotNull LocalInspectionToolSession session, @NotNull List<?
extends InspectionContext> init, final boolean isOnTheFly) { Processor<InspectionContext> processor = context -> { ProgressManager.checkCanceled(); ApplicationManager.getApplication().assertReadAccessAllowed(); if (isOnTheFly) { AstLoadingFilter.disallowTreeLoading(() -> InspectionEngine.acceptElements(elements, context.visitor)); } else { reportWhenInspectionFinished( myInspectTopicPublisher, context.tool, LOCAL, () -> { AstLoadingFilter.disallowTreeLoading(() -> InspectionEngine.acceptElements(elements, context.visitor)); }); } advanceProgress(1); context.tool.getTool().inspectionFinished(session, context.holder); if (context.holder.hasResults()) { List<ProblemDescriptor> allProblems = context.holder.getResults(); List<ProblemDescriptor> restProblems = allProblems.subList(context.problemsSize, allProblems.size()); appendDescriptors(getFile(), restProblems, context.tool); } return true; }; if (!JobLauncher.getInstance().invokeConcurrentlyUnderProgress(init, indicator, processor)) { throw new ProcessCanceledException(); } } private @NotNull Set<PsiFile> inspectInjectedPsi(final @NotNull List<? extends PsiElement> elements, final boolean onTheFly, final @NotNull ProgressIndicator indicator, final @NotNull InspectionManager iManager, final boolean inVisibleRange, final @NotNull List<? extends LocalInspectionToolWrapper> wrappers, @NotNull Set<?
/* inspectInjectedPsi (continued): enumerates injected PSI fragments under the given
 * elements, skips ones already visited, and inspects the rest concurrently; returns the
 * set of injected files visited in this call. No-op when myInspectInjectedPsi is false.
 * NONEMPTY_TEXT_ATTRIBUTES: sentinel whose isEmpty() lies so INFORMATION-level infos
 * that only carry quick fixes survive HighlightInfoFilterImpl (see comment in body). */
extends PsiFile> alreadyVisitedInjected) { if (!myInspectInjectedPsi) return Collections.emptySet(); Set<PsiFile> injected = new HashSet<>(); for (PsiElement element : elements) { PsiFile containingFile = getFile(); InjectedLanguageManager.getInstance(containingFile.getProject()).enumerateEx(element, containingFile, false, (injectedPsi, places) -> injected.add(injectedPsi)); } injected.removeAll(alreadyVisitedInjected); if (!injected.isEmpty()) { Processor<PsiFile> processor = injectedPsi -> { doInspectInjectedPsi(injectedPsi, onTheFly, indicator, iManager, inVisibleRange, wrappers); return true; }; if (!JobLauncher.getInstance().invokeConcurrentlyUnderProgress(new ArrayList<>(injected), indicator, processor)) { throw new ProcessCanceledException(); } } return injected; } private static final TextAttributes NONEMPTY_TEXT_ATTRIBUTES = new TextAttributes() { @Override public boolean isEmpty() { return false; } }; private @Nullable HighlightInfo highlightInfoFromDescriptor(@NotNull ProblemDescriptor problemDescriptor, @NotNull HighlightInfoType highlightInfoType, @NotNull @NlsContexts.DetailedDescription String message, @Nullable @NlsContexts.Tooltip String toolTip, @NotNull PsiElement psiElement, @NotNull List<IntentionAction> quickFixes, @NotNull String toolID) { TextRange textRange = ((ProblemDescriptorBase)problemDescriptor).getTextRange(); if (textRange == null) return null; boolean isFileLevel = psiElement instanceof PsiFile && textRange.equals(psiElement.getTextRange()); final HighlightSeverity severity = highlightInfoType.getSeverity(psiElement); TextAttributesKey attributesKey = ((ProblemDescriptorBase)problemDescriptor).getEnforcedTextAttributes(); TextAttributes attributes = attributesKey == null || getColorsScheme() == null ?
/* highlightInfoFromDescriptor (continued): builds the HighlightInfo (range, message,
 * severity, tooltip, attributes, end-of-line / file-level / problem-group flags).
 * The fields below (ranges2markersCache, ilManager, infos, documentManager,
 * emptyActionRegistered) back addDescriptorIncrementally, which hops to the EDT via
 * invokeLater to publish a freshly found problem while typing; suppressed elements are
 * recorded instead when myIgnoreSuppressed is set. */
mySeverityRegistrar.getTextAttributesBySeverity(severity) : getColorsScheme().getAttributes(attributesKey); HighlightInfo.Builder b = HighlightInfo.newHighlightInfo(highlightInfoType) .range(psiElement, textRange.getStartOffset(), textRange.getEndOffset()) .description(message) .severity(severity) .inspectionToolId(toolID); if (toolTip != null) b.escapedToolTip(toolTip); if (HighlightSeverity.INFORMATION.equals(severity) && attributes == null && toolTip == null && !quickFixes.isEmpty()) { // Hack to avoid filtering this info out in HighlightInfoFilterImpl even though its attributes are empty. // But it has quick fixes so it needs to be created. attributes = NONEMPTY_TEXT_ATTRIBUTES; } if (attributes != null) b.textAttributes(attributes); if (problemDescriptor.isAfterEndOfLine()) b.endOfLine(); if (isFileLevel) b.fileLevelAnnotation(); if (problemDescriptor.getProblemGroup() != null) b.problemGroup(problemDescriptor.getProblemGroup()); return b.create(); } private final Map<TextRange, RangeMarker> ranges2markersCache = new HashMap<>(); // accessed in EDT only private final InjectedLanguageManager ilManager = InjectedLanguageManager.getInstance(myProject); private final List<HighlightInfo> infos = new ArrayList<>(2); // accessed in EDT only private final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject); private final Set<Pair<TextRange, String>> emptyActionRegistered = Collections.synchronizedSet(new HashSet<>()); private void addDescriptorIncrementally(final @NotNull ProblemDescriptor descriptor, final @NotNull LocalInspectionToolWrapper tool, final @NotNull ProgressIndicator indicator) { if (myIgnoreSuppressed) { LocalInspectionToolWrapper toolWrapper = tool; PsiElement psiElement = descriptor.getPsiElement(); if (descriptor instanceof ProblemDescriptorWithReporterName) { String reportingToolName = ((ProblemDescriptorWithReporterName)descriptor).getReportingToolName(); toolWrapper =
(LocalInspectionToolWrapper)myProfileWrapper.getInspectionTool(reportingToolName, psiElement); } if (toolWrapper.getTool().isSuppressedFor(psiElement)) { registerSuppressedElements(psiElement, toolWrapper.getID(), toolWrapper.getAlternativeID()); return; } } ApplicationManager.getApplication().invokeLater(()->{ PsiElement psiElement = descriptor.getPsiElement(); if (psiElement == null) return; PsiFile file = psiElement.getContainingFile(); Document thisDocument = documentManager.getDocument(file); HighlightSeverity severity = myProfileWrapper.getErrorLevel(tool.getDisplayKey(), file).getSeverity(); infos.clear(); createHighlightsForDescriptor(infos, emptyActionRegistered, ilManager, file, thisDocument, tool, severity, descriptor, psiElement); for (HighlightInfo info : infos) { final EditorColorsScheme colorsScheme = getColorsScheme(); UpdateHighlightersUtil.addHighlighterToEditorIncrementally(myProject, myDocument, getFile(), myRestrictRange.getStartOffset(), myRestrictRange.getEndOffset(), info, colorsScheme, getId(), ranges2markersCache); } }, __->myProject.isDisposed() || indicator.isCanceled()); } private void appendDescriptors(@NotNull PsiFile file, @NotNull List<? extends ProblemDescriptor> descriptors, @NotNull LocalInspectionToolWrapper tool) { for (ProblemDescriptor descriptor : descriptors) { if (descriptor == null) { LOG.error("null descriptor. 
all descriptors(" + descriptors.size() +"): " + descriptors + "; file: " + file + " (" + file.getVirtualFile() +"); tool: " + tool); } } InspectionResult result = new InspectionResult(tool, descriptors); appendResult(file, result); } private void appendResult(@NotNull PsiFile file, @NotNull InspectionResult result) { List<InspectionResult> resultList = this.result.get(file); if (resultList == null) { resultList = ConcurrencyUtil.cacheOrGet(this.result, file, new ArrayList<>()); } synchronized (resultList) { resultList.add(result); } } @Override protected void applyInformationWithProgress() { UpdateHighlightersUtil.setHighlightersToEditor(myProject, myDocument, myRestrictRange.getStartOffset(), myRestrictRange.getEndOffset(), myInfos, getColorsScheme(), getId()); } private void addHighlightsFromResults(@NotNull List<? super HighlightInfo> outInfos) { PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject); InjectedLanguageManager ilManager = InjectedLanguageManager.getInstance(myProject); Set<Pair<TextRange, String>> emptyActionRegistered = new HashSet<>(); for (Map.Entry<PsiFile, List<InspectionResult>> entry : result.entrySet()) { ProgressManager.checkCanceled(); PsiFile file = entry.getKey(); Document documentRange = documentManager.getDocument(file); if (documentRange == null) continue; List<InspectionResult> resultList = entry.getValue(); synchronized (resultList) { for (InspectionResult inspectionResult : resultList) { ProgressManager.checkCanceled(); LocalInspectionToolWrapper tool = inspectionResult.tool; HighlightSeverity severity = myProfileWrapper.getErrorLevel(tool.getDisplayKey(), file).getSeverity(); for (ProblemDescriptor descriptor : inspectionResult.foundProblems) { ProgressManager.checkCanceled(); PsiElement element = descriptor.getPsiElement(); if (element != null) { createHighlightsForDescriptor(outInfos, emptyActionRegistered, ilManager, file, documentRange, tool, severity, descriptor, element, myIgnoreSuppressed); } } } } 
} } private void createHighlightsForDescriptor(@NotNull List<? super HighlightInfo> outInfos, @NotNull Set<? super Pair<TextRange, String>> emptyActionRegistered, @NotNull InjectedLanguageManager ilManager, @NotNull PsiFile file, @NotNull Document documentRange, @NotNull LocalInspectionToolWrapper toolWrapper, @NotNull HighlightSeverity severity, @NotNull ProblemDescriptor descriptor, @NotNull PsiElement element, boolean ignoreSuppressed) { if (descriptor instanceof ProblemDescriptorWithReporterName) { String reportingToolName = ((ProblemDescriptorWithReporterName)descriptor).getReportingToolName(); final InspectionToolWrapper<?, ?> reportingTool = myProfileWrapper.getInspectionTool(reportingToolName, element); LOG.assertTrue(reportingTool instanceof LocalInspectionToolWrapper, reportingToolName); toolWrapper = (LocalInspectionToolWrapper)reportingTool; severity = myProfileWrapper.getErrorLevel(HighlightDisplayKey.find(reportingToolName), file).getSeverity(); } LocalInspectionTool tool = toolWrapper.getTool(); if (ignoreSuppressed && tool.isSuppressedFor(element)) { registerSuppressedElements(element, toolWrapper.getID(), toolWrapper.getAlternativeID()); return; } createHighlightsForDescriptor(outInfos, emptyActionRegistered, ilManager, file, documentRange, toolWrapper, severity, descriptor, element); } private void createHighlightsForDescriptor(@NotNull List<? super HighlightInfo> outInfos, @NotNull Set<? 
super Pair<TextRange, String>> emptyActionRegistered, @NotNull InjectedLanguageManager ilManager, @NotNull PsiFile file, @NotNull Document documentRange, @NotNull LocalInspectionToolWrapper toolWrapper, @NotNull HighlightSeverity severity, @NotNull ProblemDescriptor descriptor, @NotNull PsiElement element) { HighlightInfoType level = ProblemDescriptorUtil.highlightTypeFromDescriptor(descriptor, severity, mySeverityRegistrar); @NlsSafe String message = ProblemDescriptorUtil.renderDescriptionMessage(descriptor, element); ProblemGroup problemGroup = descriptor.getProblemGroup(); String problemName = problemGroup != null ? problemGroup.getProblemName() : null; String shortName = problemName != null ? problemName : toolWrapper.getShortName(); final HighlightDisplayKey key = HighlightDisplayKey.find(shortName); final InspectionProfile inspectionProfile = myProfileWrapper.getInspectionProfile(); if (!inspectionProfile.isToolEnabled(key, getFile())) return; HighlightInfoType type = new InspectionHighlightInfoType(level, element); final String plainMessage = message.startsWith("<html>") ? 
StringUtil.unescapeXmlEntities(XmlStringUtil.stripHtml(message).replaceAll("<[^>]*>", "")) .replaceAll("&nbsp;", " ") : message; @NlsSafe String tooltip = null; if (descriptor.showTooltip()) { tooltip = tooltips.intern(DaemonTooltipsUtil.getWrappedTooltip(message, shortName, myShortcutText, showToolDescription(toolWrapper))); } List<IntentionAction> fixes = getQuickFixes(key, descriptor, emptyActionRegistered); HighlightInfo info = highlightInfoFromDescriptor(descriptor, type, plainMessage, tooltip, element, fixes, key.getID()); if (info == null) return; registerQuickFixes(info, fixes, shortName); PsiFile context = getTopLevelFileInBaseLanguage(element); PsiFile myContext = getTopLevelFileInBaseLanguage(getFile()); if (context != getFile()) { String errorMessage = "Reported element " + element + " is not from the file '" + file.getVirtualFile().getPath() + "' the inspection '" + shortName + "' (" + toolWrapper.getTool().getClass() + ") was invoked for. Message: '" + descriptor + "'.\nElement containing file: " + context + "\nInspection invoked for file: " + myContext + "\n"; PluginException.logPluginError(LOG, errorMessage, null, toolWrapper.getTool().getClass()); } boolean isOutsideInjected = !myInspectInjectedPsi || file == getFile(); if (isOutsideInjected) { outInfos.add(info); return; } injectToHost(outInfos, ilManager, file, documentRange, element, fixes, info, shortName); } private void registerSuppressedElements(@NotNull PsiElement element, String id, String alternativeID) { mySuppressedElements.computeIfAbsent(id, shortName -> new HashSet<>()).add(element); if (alternativeID != null) { mySuppressedElements.computeIfAbsent(alternativeID, shortName -> new HashSet<>()).add(element); } } private static void injectToHost(@NotNull List<? super HighlightInfo> outInfos, @NotNull InjectedLanguageManager ilManager, @NotNull PsiFile file, @NotNull Document documentRange, @NotNull PsiElement element, @NotNull List<? 
extends IntentionAction> fixes,
                                   @NotNull HighlightInfo info,
                                   String shortName) {
    // todo we got to separate our "internal" prefixes/suffixes from user-defined ones
    // todo in the latter case the errors should be highlighted, otherwise not
    List<TextRange> editables = ilManager.intersectWithAllEditableFragments(file, new TextRange(info.startOffset, info.endOffset));
    for (TextRange editable : editables) {
      // Translate each editable injected range into host-document coordinates.
      TextRange hostRange = ((DocumentWindow)documentRange).injectedToHost(editable);
      int start = hostRange.getStartOffset();
      int end = hostRange.getEndOffset();
      HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(info.type).range(element, start, end);
      String description = info.getDescription();
      if (description != null) {
        builder.description(description);
      }
      String toolTip = info.getToolTip();
      if (toolTip != null) {
        builder.escapedToolTip(toolTip);
      }
      HighlightInfo patched = builder.createUnconditionally();
      // Keep the patched info unless the mapping collapsed a non-empty highlight to an empty range.
      if (patched.startOffset != patched.endOffset || info.startOffset == info.endOffset) {
        patched.setFromInjection(true);
        registerQuickFixes(patched, fixes, shortName);
        outInfos.add(patched);
      }
    }
  }

  // Returns the top-level (host) file of the element, in the view provider's base language.
  private PsiFile getTopLevelFileInBaseLanguage(@NotNull PsiElement element) {
    PsiFile file = InjectedLanguageManager.getInstance(myProject).getTopLevelFile(element);
    FileViewProvider viewProvider = file.getViewProvider();
    return viewProvider.getPsi(viewProvider.getBaseLanguage());
  }

  // Weak interner shared by all passes to deduplicate identical tooltip strings.
  private static final Interner<String> tooltips = Interner.createWeakInterner();

  // A tool description link is shown unless the tool explicitly declares an empty static description.
  private static boolean showToolDescription(@NotNull LocalInspectionToolWrapper tool) {
    String staticDescription = tool.getStaticDescription();
    return staticDescription == null || !staticDescription.isEmpty();
  }

  // Attaches the given quick fixes to the highlight under the display key derived from shortName.
  private static void registerQuickFixes(@NotNull HighlightInfo highlightInfo,
                                         @NotNull List<? extends IntentionAction> quickFixes,
                                         String shortName) {
    final HighlightDisplayKey key = HighlightDisplayKey.find(shortName);
    for (IntentionAction quickFix : quickFixes) {
      QuickFixAction.registerQuickFixAction(highlightInfo, quickFix, key);
    }
  }

  // Collects intention actions for a descriptor: wrapped quick fixes, an optional hint action,
  // and - when nothing else applies - a single EmptyIntentionAction per (range, key) pair.
  private static @NotNull List<IntentionAction> getQuickFixes(@NotNull HighlightDisplayKey key,
                                                              @NotNull ProblemDescriptor descriptor,
                                                              @NotNull Set<? super Pair<TextRange, String>> emptyActionRegistered) {
    List<IntentionAction> result = new SmartList<>();
    boolean needEmptyAction = true;
    QuickFix[] fixes = descriptor.getFixes();
    if (fixes != null && fixes.length != 0) {
      for (int k = 0; k < fixes.length; k++) {
        QuickFix fix = fixes[k];
        // A null entry in the fixes array is a plugin bug: fail loudly with full context.
        if (fix == null) throw new IllegalStateException("Inspection " + key + " returns null quick fix in its descriptor: " + descriptor + "; array: " + Arrays.toString(fixes));
        result.add(QuickFixWrapper.wrap(descriptor, k));
        needEmptyAction = false;
      }
    }
    HintAction hintAction = descriptor instanceof ProblemDescriptorImpl ? ((ProblemDescriptorImpl)descriptor).getHintAction() : null;
    if (hintAction != null) {
      result.add(hintAction);
      needEmptyAction = false;
    }
    if (((ProblemDescriptorBase)descriptor).getEnforcedTextAttributes() != null) {
      needEmptyAction = false;
    }
    if (needEmptyAction && emptyActionRegistered.add(Pair.create(((ProblemDescriptorBase)descriptor).getTextRange(), key.toString()))) {
      String displayNameByKey = HighlightDisplayKey.getDisplayNameByKey(key);
      LOG.assertTrue(displayNameByKey != null, key.toString());
      IntentionAction emptyIntentionAction = new EmptyIntentionAction(displayNameByKey);
      result.add(emptyIntentionAction);
    }
    return result;
  }

  // Recursively gathers every PSI element of every inspectable language root in the file,
  // together with the set of dialect ids encountered (used to filter applicable tools).
  private static void getElementsAndDialectsFrom(@NotNull PsiFile file,
                                                 @NotNull List<? super PsiElement> outElements,
                                                 @NotNull Set<? super String> outDialects) {
    final FileViewProvider viewProvider = file.getViewProvider();
    Set<Language> processedLanguages = new SmartHashSet<>();
    final PsiElementVisitor visitor = new PsiRecursiveElementVisitor() {
      @Override
      public void visitElement(@NotNull PsiElement element) {
        ProgressManager.checkCanceled();
        PsiElement child = element.getFirstChild();
        while (child != null) {
          outElements.add(child);
          child.accept(this);
          appendDialects(child, processedLanguages, outDialects);
          child = child.getNextSibling();
        }
      }
    };
    for (Language language : viewProvider.getLanguages()) {
      final PsiFile psiRoot = viewProvider.getPsi(language);
      // Skip roots the highlighting level settings exclude from inspection.
      if (psiRoot == null || !HighlightingLevelManager.getInstance(file.getProject()).shouldInspect(psiRoot)) {
        continue;
      }
      outElements.add(psiRoot);
      psiRoot.accept(visitor);
      appendDialects(psiRoot, processedLanguages, outDialects);
    }
  }

  // Adds the element's language id plus, once per language, all of its dialect ids.
  private static void appendDialects(@NotNull PsiElement element,
                                     @NotNull Set<? super Language> outProcessedLanguages,
                                     @NotNull Set<? super String> outDialectIds) {
    Language language = element.getLanguage();
    outDialectIds.add(language.getID());
    if (outProcessedLanguages.add(language)) {
      for (Language dialect : language.getDialects()) {
        outDialectIds.add(dialect.getID());
      }
    }
  }

  // Resolves the enabled local inspection tools for this file from the given profile,
  // unwrapping global tools that expose a shared local part and filtering by language/suppression.
  @NotNull
  List<LocalInspectionToolWrapper> getInspectionTools(@NotNull InspectionProfileWrapper profile) {
    List<InspectionToolWrapper<?, ?>> toolWrappers = profile.getInspectionProfile().getInspectionTools(getFile());
    InspectionProfileWrapper.checkInspectionsDuplicates(toolWrappers);
    List<LocalInspectionToolWrapper> enabled = new ArrayList<>();
    for (InspectionToolWrapper<?, ?> toolWrapper : toolWrappers) {
      ProgressManager.checkCanceled();
      if (toolWrapper instanceof LocalInspectionToolWrapper && !isAcceptableLocalTool((LocalInspectionToolWrapper)toolWrapper)) {
        continue;
      }
      final HighlightDisplayKey key = toolWrapper.getDisplayKey();
      if (!profile.isToolEnabled(key, getFile())) continue;
      if
(HighlightDisplayLevel.DO_NOT_SHOW.equals(profile.getErrorLevel(key, getFile()))) continue;
      LocalInspectionToolWrapper wrapper;
      if (toolWrapper instanceof LocalInspectionToolWrapper) {
        wrapper = (LocalInspectionToolWrapper)toolWrapper;
      }
      else {
        // Global tools may contribute a shared local part; skip those that don't (or that we reject).
        wrapper = ((GlobalInspectionToolWrapper)toolWrapper).getSharedLocalInspectionToolWrapper();
        if (wrapper == null || !isAcceptableLocalTool(wrapper)) continue;
      }
      String language = wrapper.getLanguage();
      if (language != null && Language.findLanguageByID(language) == null) {
        continue; // filter out at least unknown languages
      }
      if (myIgnoreSuppressed && wrapper.getTool().isSuppressedFor(getFile())) {
        continue;
      }
      enabled.add(wrapper);
    }
    return enabled;
  }

  // Extension point for subclasses to reject individual local tools; accepts everything by default.
  protected boolean isAcceptableLocalTool(@NotNull LocalInspectionToolWrapper wrapper) {
    return true;
  }

  // Runs the applicable local inspections over a single injected PSI file.
  // Problems suppressed at the injection host are recorded instead of reported; in on-the-fly
  // visible-range mode each problem is additionally published incrementally.
  private void doInspectInjectedPsi(@NotNull PsiFile injectedPsi,
                                    final boolean isOnTheFly,
                                    final @NotNull ProgressIndicator indicator,
                                    @NotNull InspectionManager iManager,
                                    final boolean inVisibleRange,
                                    @NotNull List<? extends LocalInspectionToolWrapper> wrappers) {
    final PsiElement host = InjectedLanguageManager.getInstance(injectedPsi.getProject()).getInjectionHost(injectedPsi);
    List<PsiElement> elements = new ArrayList<>();
    Set<String> elementDialectIds = new SmartHashSet<>();
    getElementsAndDialectsFrom(injectedPsi, elements, elementDialectIds);
    if (elements.isEmpty()) {
      return;
    }
    List<LocalInspectionToolWrapper> applicableTools = InspectionEngine.filterToolsApplicableByLanguage(wrappers, elementDialectIds);
    for (LocalInspectionToolWrapper wrapper : applicableTools) {
      ProgressManager.checkCanceled();
      final LocalInspectionTool tool = wrapper.getTool();
      ProblemsHolder holder = new ProblemsHolder(iManager, injectedPsi, isOnTheFly) {
        @Override
        public void registerProblem(@NotNull ProblemDescriptor descriptor) {
          if (host != null && myIgnoreSuppressed && tool.isSuppressedFor(host)) {
            registerSuppressedElements(host, wrapper.getID(), wrapper.getAlternativeID());
            return;
          }
          super.registerProblem(descriptor);
          if (isOnTheFly && inVisibleRange) {
            addDescriptorIncrementally(descriptor, wrapper, indicator);
          }
        }
      };
      // The session covers the whole injected fragment.
      LocalInspectionToolSession injSession = new LocalInspectionToolSession(injectedPsi, 0, injectedPsi.getTextLength());
      InspectionEngine.createVisitorAndAcceptElements(tool, holder, isOnTheFly, injSession, elements);
      tool.inspectionFinished(injSession, holder);
      List<ProblemDescriptor> problems = holder.getResults();
      if (!problems.isEmpty()) {
        appendDescriptors(injectedPsi, problems, wrapper);
      }
    }
  }

  @Override
  public @NotNull List<HighlightInfo> getInfos() {
    return myInfos;
  }

  // Immutable pairing of a tool with the problems it found; the problem list is defensively copied.
  private static final class InspectionResult {
    private final @NotNull LocalInspectionToolWrapper tool;
    private final @NotNull List<? extends ProblemDescriptor> foundProblems;

    private InspectionResult(@NotNull LocalInspectionToolWrapper tool, @NotNull List<? extends ProblemDescriptor> foundProblems) {
      this.tool = tool;
      this.foundProblems = new ArrayList<>(foundProblems);
    }
  }

  // Per-tool state carried between the visible-range and rest-of-file inspection phases.
  private static final class InspectionContext {
    private InspectionContext(@NotNull LocalInspectionToolWrapper tool,
                              @NotNull ProblemsHolder holder,
                              int problemsSize, // need this to diff between found problems in visible part and the rest
                              @NotNull PsiElementVisitor visitor) {
      this.tool = tool;
      this.holder = holder;
      this.problemsSize = problemsSize;
      this.visitor = visitor;
    }

    private final @NotNull LocalInspectionToolWrapper tool;
    private final @NotNull ProblemsHolder holder;
    private final int problemsSize;
    private final @NotNull PsiElementVisitor visitor;
  }

  // Highlight type that resolves its severity against a concrete element's context.
  public static class InspectionHighlightInfoType extends HighlightInfoType.HighlightInfoTypeImpl {
    InspectionHighlightInfoType(@NotNull HighlightInfoType level, @NotNull PsiElement element) {
      super(level.getSeverity(element), level.getAttributesKey());
    }
  }

  private static @Nls String getPresentableNameText() {
    return DaemonBundle.message("pass.inspection");
  }
}
package org.jkiss.dbeaver.registry;

import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.connection.DBPConnectionBootstrap;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.connection.DBPConnectionEventType;
import org.jkiss.dbeaver.model.connection.DBPConnectionType;
import org.jkiss.dbeaver.model.impl.preferences.SimplePreferenceStore;
import org.jkiss.dbeaver.model.net.DBWHandlerConfiguration;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.model.runtime.DBRShellCommand;
import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.model.struct.DBSObjectFilter;
import org.jkiss.dbeaver.model.struct.rdb.DBSCatalog;
import org.jkiss.dbeaver.model.struct.rdb.DBSSchema;
import org.jkiss.dbeaver.registry.driver.DriverDescriptor;
import org.jkiss.dbeaver.registry.encode.EncryptionException;
import org.jkiss.dbeaver.registry.encode.PasswordEncrypter;
import org.jkiss.dbeaver.registry.encode.SimpleStringEncrypter;
import org.jkiss.dbeaver.registry.network.NetworkHandlerDescriptor;
import org.jkiss.dbeaver.registry.network.NetworkHandlerRegistry;
import org.jkiss.dbeaver.runtime.RuntimeUtils;
import org.jkiss.dbeaver.utils.ContentUtils;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.xml.SAXListener;
import org.jkiss.utils.xml.SAXReader;
import org.jkiss.utils.xml.XMLBuilder;
import org.jkiss.utils.xml.XMLException;
import org.xml.sax.Attributes;

import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.util.*;

/**
 * Registry of data source descriptors for a single workspace project.
 * Loads/saves the configuration XML in the project root and notifies listeners of changes.
 */
public class DataSourceRegistry implements DBPDataSourceRegistry {
    @Deprecated
    public static final String DEFAULT_AUTO_COMMIT = "default.autocommit"; //$NON-NLS-1$
    @Deprecated
    public static final String DEFAULT_ISOLATION = "default.isolation"; //$NON-NLS-1$
    @Deprecated
    public static final String DEFAULT_ACTIVE_OBJECT = "default.activeObject"; //$NON-NLS-1$

    static final Log log = Log.getLog(DataSourceRegistry.class);

    public static final String OLD_CONFIG_FILE_NAME = "data-sources.xml"; //$NON-NLS-1$

    private final DBPApplication application;
    private final IProject project;
    // Both lists serve as their own monitors - always synchronize on them before iterating/mutating.
    private final List<DataSourceDescriptor> dataSources = new ArrayList<>();
    private final List<DBPEventListener> dataSourceListeners = new ArrayList<>();

    public DataSourceRegistry(DBPApplication application, IProject project) {
        this.application = application;
        this.project = project;
        loadDataSources(false);
        DataSourceProviderRegistry.getInstance().fireRegistryChange(this, true);
    }

    // Shuts the registry down: unregisters, disconnects with a timeout, and disposes descriptors.
    public void dispose() {
        DataSourceProviderRegistry.getInstance().fireRegistryChange(this, false);
        synchronized (dataSourceListeners) {
            if (!this.dataSourceListeners.isEmpty()) {
                log.warn("Some data source listeners are still registered: " + dataSourceListeners);
            }
            this.dataSourceListeners.clear();
        }
        // Disconnect in 2 seconds or die
        closeConnections(DBConstants.DISCONNECT_TIMEOUT);
        // Do not save config on shutdown.
        // Some data source might be broken due to misconfiguration
        // and we don't want to lose their config just after restart
//        if (getProject().isOpen()) {
//            flushConfig();
        // Dispose and clear all descriptors
        synchronized (dataSources) {
            for (DataSourceDescriptor dataSourceDescriptor : this.dataSources) {
                dataSourceDescriptor.dispose();
            }
            this.dataSources.clear();
        }
    }

    // Disconnects all connected data sources, waiting at most waitTime ms for the task to finish.
    public void closeConnections(long waitTime) {
        boolean hasConnections = false;
        synchronized (dataSources) {
            for (DataSourceDescriptor dataSource : dataSources) {
                if (dataSource.isConnected()) {
                    hasConnections = true;
                    break;
                }
            }
        }
        if (!hasConnections) {
            return;
        }
        final DisconnectTask disconnectTask = new DisconnectTask();
        if (!RuntimeUtils.runTask(disconnectTask, waitTime)) {
            log.warn("Some data source connections wasn't closed on shutdown in " + waitTime + "ms. Probably network timeout occurred.");
        }
    }

    @NotNull
    public DBPApplication getApplication() {
        return application;
    }

    // Data sources

    /** Looks a descriptor up by its unique id; null when absent. */
    @Nullable
    @Override
    public DataSourceDescriptor getDataSource(String id) {
        synchronized (dataSources) {
            for (DataSourceDescriptor dsd : dataSources) {
                if (dsd.getId().equals(id)) {
                    return dsd;
                }
            }
        }
        return null;
    }

    /** Looks a descriptor up by identity of its live DBPDataSource instance; null when absent. */
    @Nullable
    @Override
    public DataSourceDescriptor getDataSource(DBPDataSource dataSource) {
        synchronized (dataSources) {
            for (DataSourceDescriptor dsd : dataSources) {
                if (dsd.getDataSource() == dataSource) {
                    return dsd;
                }
            }
        }
        return null;
    }

    /** Looks a descriptor up by its display name (exact match); null when absent. */
    @Nullable
    @Override
    public DataSourceDescriptor findDataSourceByName(String name) {
        synchronized (dataSources) {
            for (DataSourceDescriptor dsd : dataSources) {
                if (dsd.getName().equals(name)) {
                    return dsd;
                }
            }
        }
        return null;
    }

    /** Returns a snapshot copy of all descriptors, sorted by name (case-insensitive). */
    @Override
    public List<DataSourceDescriptor> getDataSources() {
        List<DataSourceDescriptor> dsCopy;
        synchronized (dataSources) {
            dsCopy = CommonUtils.copyList(dataSources);
        }
        Collections.sort(dsCopy, new Comparator<DataSourceDescriptor>() {
            @Override
            public int compare(DataSourceDescriptor o1, DataSourceDescriptor o2) {
                return
o1.getName().compareToIgnoreCase(o2.getName());
            }
        });
        return dsCopy;
    }

    // Registers a new descriptor, persists the config and notifies listeners.
    public void addDataSource(DBPDataSourceContainer dataSource) {
        synchronized (dataSources) {
            this.dataSources.add((DataSourceDescriptor) dataSource);
        }
        this.saveDataSources();
        this.fireDataSourceEvent(DBPEvent.Action.OBJECT_ADD, dataSource);
    }

    // Unregisters a descriptor, persists the config, notifies listeners, then disposes it.
    public void removeDataSource(DBPDataSourceContainer dataSource) {
        synchronized (dataSources) {
            this.dataSources.remove(dataSource);
        }
        this.saveDataSources();
        try {
            this.fireDataSourceEvent(DBPEvent.Action.OBJECT_REMOVE, dataSource);
        } finally {
            // Dispose even if a listener throws.
            ((DataSourceDescriptor)dataSource).dispose();
        }
    }

    public void updateDataSource(DBPDataSourceContainer dataSource) {
        this.saveDataSources();
        this.fireDataSourceEvent(DBPEvent.Action.OBJECT_UPDATE, dataSource);
    }

    @Override
    public void flushConfig() {
        this.saveDataSources();
    }

    @Override
    public void refreshConfig() {
        this.loadDataSources(true);
    }

    @Override
    public void addDataSourceListener(DBPEventListener listener) {
        synchronized (dataSourceListeners) {
            dataSourceListeners.add(listener);
        }
    }

    @Override
    public boolean removeDataSourceListener(DBPEventListener listener) {
        synchronized (dataSourceListeners) {
            return dataSourceListeners.remove(listener);
        }
    }

    public void fireDataSourceEvent(
        DBPEvent.Action action,
        DBSObject object) {
        notifyDataSourceListeners(new DBPEvent(action, object));
    }

    // Delivers the event to a snapshot of the listener list, outside the listener lock,
    // so handlers may add/remove listeners without deadlocking.
    public void notifyDataSourceListeners(final DBPEvent event) {
        if (dataSourceListeners.isEmpty()) {
            return;
        }
        final List<DBPEventListener> listeners;
        synchronized (dataSourceListeners) {
            listeners = new ArrayList<>(dataSourceListeners);
        }
        for (DBPEventListener listener : listeners) {
            listener.handleDataSourceEvent(event);
        }
    }

    // Scans the project root for every config file matching CONFIG_FILE_PREFIX/CONFIG_FILE_EXT
    // and loads it. With refresh=true, fires update/add events and drops descriptors that
    // disappeared from the configuration.
    private void loadDataSources(boolean refresh) {
        ParseResults parseResults = new ParseResults();
        try {
            for (IResource res : project.members(IContainer.INCLUDE_HIDDEN)) {
                if (res instanceof IFile) {
                    IFile file = (IFile) res;
                    if (res.getName().startsWith(CONFIG_FILE_PREFIX) && res.getName().endsWith(CONFIG_FILE_EXT)) {
                        if (file.exists()) {
                            File dsFile = file.getLocation().toFile();
                            if (dsFile.exists()) {
                                loadDataSources(dsFile, new SimpleStringEncrypter(), refresh, parseResults);
                            }
                        }
                    }
                }
            }
        } catch (CoreException e) {
            log.error("Error reading datasources configuration", e);
        }

        // Reflect changes
        if (refresh) {
            for (DataSourceDescriptor ds : parseResults.updatedDataSources) {
                fireDataSourceEvent(DBPEvent.Action.OBJECT_UPDATE, ds);
            }
            for (DataSourceDescriptor ds : parseResults.addedDataSources) {
                fireDataSourceEvent(DBPEvent.Action.OBJECT_ADD, ds);
            }

            List<DataSourceDescriptor> removedDataSource = new ArrayList<>();
            // NOTE(review): dataSources is iterated and mutated here without synchronizing on it,
            // unlike every other access in this class - looks racy; verify the calling context.
            for (DataSourceDescriptor ds : dataSources) {
                if (!parseResults.addedDataSources.contains(ds) && !parseResults.updatedDataSources.contains(ds)) {
                    removedDataSource.add(ds);
                }
            }
            for (DataSourceDescriptor ds : removedDataSource) {
                this.dataSources.remove(ds);
                this.fireDataSourceEvent(DBPEvent.Action.OBJECT_REMOVE, ds);
                ds.dispose();
            }
        }
    }

    // Loads one config file; files other than the primary CONFIG_FILE_NAME are treated as
    // externally provided ("extra") configs. Parse errors are logged, not propagated.
    private void loadDataSources(File fromFile, PasswordEncrypter encrypter, boolean refresh, ParseResults parseResults) {
        if (!fromFile.exists()) {
            return;
        }
        boolean extraConfig = !fromFile.getName().equalsIgnoreCase(CONFIG_FILE_NAME);
        try {
            InputStream is = new FileInputStream(fromFile);
            try {
                try {
                    loadDataSources(is, encrypter, extraConfig, refresh, parseResults);
                } catch (DBException ex) {
                    log.warn("Error loading datasource config from " + fromFile.getAbsolutePath(), ex);
                } finally {
                    // NOTE(review): the stream is closed both here and in ContentUtils.close below -
                    // redundant double close; presumably harmless, but worth simplifying.
                    is.close();
                }
            } catch (IOException ex) {
                log.warn("IO error", ex);
            } finally {
                ContentUtils.close(is);
            }
        } catch (IOException e) {
            log.warn("Can't load config file " + fromFile.getAbsolutePath(), e);
        }
    }

    // Parses one config stream with the SAX-based DataSourcesParser.
    private void loadDataSources(InputStream is, PasswordEncrypter encrypter, boolean extraConfig, boolean refresh, ParseResults parseResults)
        throws DBException, IOException {
        SAXReader parser = new SAXReader(is);
        try {
            final DataSourcesParser dsp = new DataSourcesParser(extraConfig, refresh, parseResults, encrypter);
            parser.parse(dsp);
        } catch (XMLException ex) {
            throw new DBException("Datasource config parse error", ex);
        }
    }

    // Serializes all non-provided descriptors to the primary config file.
    // Builds the XML in memory first so a serialization failure cannot corrupt the file on disk;
    // deletes the file when no descriptors remain.
    void saveDataSources() {
        List<DataSourceDescriptor> localDataSources;
        synchronized (dataSources) {
            localDataSources = CommonUtils.copyList(dataSources);
        }
        IProgressMonitor progressMonitor = VoidProgressMonitor.INSTANCE.getNestedMonitor();
        PasswordEncrypter encrypter = new SimpleStringEncrypter();
        IFile configFile = getProject().getFile(CONFIG_FILE_NAME);
        try {
            if (localDataSources.isEmpty()) {
                configFile.delete(true, false, progressMonitor);
            } else {
                // Save in temp memory to be safe (any error during direct write will corrupt configuration)
                ByteArrayOutputStream tempStream = new ByteArrayOutputStream(10000);
                try {
                    XMLBuilder xml = new XMLBuilder(tempStream, GeneralUtils.DEFAULT_FILE_CHARSET_NAME);
                    xml.setButify(true);
                    xml.startElement("data-sources");
                    for (DataSourceDescriptor dataSource : localDataSources) {
                        // Provided (externally managed) data sources are not persisted here.
                        if (!dataSource.isProvided()) {
                            saveDataSource(xml, dataSource, encrypter);
                        }
                    }
                    xml.endElement();
                    xml.flush();
                } catch (IOException ex) {
                    log.warn("IO error while saving datasources", ex);
                }
                InputStream ifs = new ByteArrayInputStream(tempStream.toByteArray());
                if (!configFile.exists()) {
                    configFile.create(ifs, true, progressMonitor);
                    configFile.setHidden(true);
                } else {
                    configFile.setContents(ifs, true, false, progressMonitor);
                }
            }
        } catch (CoreException ex) {
            log.error("Error saving datasources configuration", ex);
        }
    }

    // Writes one descriptor as a <data-source> element: identity attributes, connection settings
    // (with optionally encrypted password), events, network handlers, bootstrap, filters,
    // virtual model and custom preferences.
    private void saveDataSource(XMLBuilder xml, DataSourceDescriptor dataSource, PasswordEncrypter encrypter)
        throws IOException {
        xml.startElement(RegistryConstants.TAG_DATA_SOURCE);
        xml.addAttribute(RegistryConstants.ATTR_ID, dataSource.getId());
        xml.addAttribute(RegistryConstants.ATTR_PROVIDER, dataSource.getDriver().getProviderDescriptor().getId());
        xml.addAttribute(RegistryConstants.ATTR_DRIVER, dataSource.getDriver().getId());
        xml.addAttribute(RegistryConstants.ATTR_NAME, dataSource.getName());
        xml.addAttribute(RegistryConstants.ATTR_CREATE_DATE,
dataSource.getCreateDate().getTime());
        if (dataSource.getUpdateDate() != null) {
            xml.addAttribute(RegistryConstants.ATTR_UPDATE_DATE, dataSource.getUpdateDate().getTime());
        }
        if (dataSource.getLoginDate() != null) {
            xml.addAttribute(RegistryConstants.ATTR_LOGIN_DATE, dataSource.getLoginDate().getTime());
        }
        xml.addAttribute(RegistryConstants.ATTR_SAVE_PASSWORD, dataSource.isSavePassword());
        xml.addAttribute(RegistryConstants.ATTR_SHOW_SYSTEM_OBJECTS, dataSource.isShowSystemObjects());
        xml.addAttribute(RegistryConstants.ATTR_READ_ONLY, dataSource.isConnectionReadOnly());
        if (!CommonUtils.isEmpty(dataSource.getFolderPath())) {
            xml.addAttribute(RegistryConstants.ATTR_FOLDER, dataSource.getFolderPath());
        }

        {
            // Connection info
            DBPConnectionConfiguration connectionInfo = dataSource.getConnectionConfiguration();
            xml.startElement(RegistryConstants.TAG_CONNECTION);
            if (!CommonUtils.isEmpty(connectionInfo.getHostName())) {
                xml.addAttribute(RegistryConstants.ATTR_HOST, connectionInfo.getHostName());
            }
            if (!CommonUtils.isEmpty(connectionInfo.getHostPort())) {
                xml.addAttribute(RegistryConstants.ATTR_PORT, connectionInfo.getHostPort());
            }
            xml.addAttribute(RegistryConstants.ATTR_SERVER, CommonUtils.notEmpty(connectionInfo.getServerName()));
            xml.addAttribute(RegistryConstants.ATTR_DATABASE, CommonUtils.notEmpty(connectionInfo.getDatabaseName()));
            xml.addAttribute(RegistryConstants.ATTR_URL, CommonUtils.notEmpty(connectionInfo.getUrl()));
            xml.addAttribute(RegistryConstants.ATTR_USER, CommonUtils.notEmpty(connectionInfo.getUserName()));
            if (dataSource.isSavePassword() && !CommonUtils.isEmpty(connectionInfo.getUserPassword())) {
                // Password is stored encrypted when possible; on encryption failure it is saved as-is.
                String encPassword = connectionInfo.getUserPassword();
                if (!CommonUtils.isEmpty(encPassword)) {
                    try {
                        encPassword = encrypter.encrypt(encPassword);
                    } catch (EncryptionException e) {
                        log.error("Can't encrypt password. Save it as is", e);
                    }
                }
                xml.addAttribute(RegistryConstants.ATTR_PASSWORD, encPassword);
            }
            if (!CommonUtils.isEmpty(connectionInfo.getClientHomeId())) {
                xml.addAttribute(RegistryConstants.ATTR_HOME, connectionInfo.getClientHomeId());
            }
            if (connectionInfo.getConnectionType() != null) {
                xml.addAttribute(RegistryConstants.ATTR_TYPE, connectionInfo.getConnectionType().getId());
            }
            if (connectionInfo.getConnectionColor() != null) {
                xml.addAttribute(RegistryConstants.ATTR_COLOR, connectionInfo.getConnectionColor());
            }
            if (connectionInfo.getProperties() != null) {
                // Arbitrary driver properties as <property name= value=> children.
                for (Map.Entry<Object, Object> entry : connectionInfo.getProperties().entrySet()) {
                    xml.startElement(RegistryConstants.TAG_PROPERTY);
                    xml.addAttribute(RegistryConstants.ATTR_NAME, CommonUtils.toString(entry.getKey()));
                    xml.addAttribute(RegistryConstants.ATTR_VALUE, CommonUtils.toString(entry.getValue()));
                    xml.endElement();
                }
            }
            // Save events
            for (DBPConnectionEventType eventType : connectionInfo.getDeclaredEvents()) {
                DBRShellCommand command = connectionInfo.getEvent(eventType);
                xml.startElement(RegistryConstants.TAG_EVENT);
                xml.addAttribute(RegistryConstants.ATTR_TYPE, eventType.name());
                xml.addAttribute(RegistryConstants.ATTR_ENABLED, command.isEnabled());
                xml.addAttribute(RegistryConstants.ATTR_SHOW_PANEL, command.isShowProcessPanel());
                xml.addAttribute(RegistryConstants.ATTR_WAIT_PROCESS, command.isWaitProcessFinish());
                xml.addAttribute(RegistryConstants.ATTR_TERMINATE_AT_DISCONNECT, command.isTerminateAtDisconnect());
                xml.addText(command.getCommand());
                xml.endElement();
            }
            // Save network handlers' configurations
            for (DBWHandlerConfiguration configuration : connectionInfo.getDeclaredHandlers()) {
                xml.startElement(RegistryConstants.TAG_NETWORK_HANDLER);
                xml.addAttribute(RegistryConstants.ATTR_TYPE, configuration.getType().name());
                xml.addAttribute(RegistryConstants.ATTR_ID, CommonUtils.notEmpty(configuration.getId()));
                xml.addAttribute(RegistryConstants.ATTR_ENABLED, configuration.isEnabled());
                xml.addAttribute(RegistryConstants.ATTR_USER, CommonUtils.notEmpty(configuration.getUserName()));
                xml.addAttribute(RegistryConstants.ATTR_SAVE_PASSWORD, configuration.isSavePassword());
                if (configuration.isSavePassword() && !CommonUtils.isEmpty(configuration.getPassword())) {
                    // Same best-effort encryption policy as the main connection password above.
                    String encPassword = configuration.getPassword();
                    if (!CommonUtils.isEmpty(encPassword)) {
                        try {
                            encPassword = encrypter.encrypt(encPassword);
                        } catch (EncryptionException e) {
                            log.error("Can't encrypt password. Save it as is", e);
                        }
                    }
                    xml.addAttribute(RegistryConstants.ATTR_PASSWORD, encPassword);
                }
                for (Map.Entry<String, String> entry : configuration.getProperties().entrySet()) {
                    xml.startElement(RegistryConstants.TAG_PROPERTY);
                    xml.addAttribute(RegistryConstants.ATTR_NAME, entry.getKey());
                    xml.addAttribute(RegistryConstants.ATTR_VALUE, CommonUtils.notEmpty(entry.getValue()));
                    xml.endElement();
                }
                xml.endElement();
            }

            // Save bootstrap info
            {
                DBPConnectionBootstrap bootstrap = connectionInfo.getBootstrap();
                xml.startElement(RegistryConstants.TAG_BOOTSTRAP);
                if (bootstrap.getDefaultAutoCommit() != null) {
                    xml.addAttribute(RegistryConstants.ATTR_AUTOCOMMIT, bootstrap.getDefaultAutoCommit());
                }
                if (bootstrap.getDefaultTransactionIsolation() != null) {
                    xml.addAttribute(RegistryConstants.ATTR_TXN_ISOLATION, bootstrap.getDefaultTransactionIsolation());
                }
                if (!CommonUtils.isEmpty(bootstrap.getDefaultObjectName())) {
                    xml.addAttribute(RegistryConstants.ATTR_DEFAULT_OBJECT, bootstrap.getDefaultObjectName());
                }
                if (bootstrap.isIgnoreErrors()) {
                    xml.addAttribute(RegistryConstants.ATTR_IGNORE_ERRORS, true);
                }
                for (String query : bootstrap.getInitQueries()) {
                    xml.startElement(RegistryConstants.TAG_QUERY);
                    xml.addText(query);
                    xml.endElement();
                }
                xml.endElement();
            }

            xml.endElement();
        }

        {
            // Filters
            Collection<DataSourceDescriptor.FilterMapping> filterMappings = dataSource.getObjectFilters();
            if (!CommonUtils.isEmpty(filterMappings)) {
                xml.startElement(RegistryConstants.TAG_FILTERS);
                for (DataSourceDescriptor.FilterMapping filter : filterMappings) {
                    // A null objectID marks the type's default filter; custom filters carry their key.
                    if (filter.defaultFilter != null) {
                        saveObjectFiler(xml, filter.typeName, null, filter.defaultFilter);
                    }
                    for (Map.Entry<String,DBSObjectFilter> cf : filter.customFilters.entrySet()) {
                        saveObjectFiler(xml, filter.typeName, cf.getKey(), cf.getValue());
                    }
                }
                xml.endElement();
            }
        }

        // Virtual model
        if (dataSource.getVirtualModel().hasValuableData()) {
            xml.startElement(RegistryConstants.TAG_VIRTUAL_META_DATA);
            dataSource.getVirtualModel().serialize(xml);
            xml.endElement();
        }

        // Preferences
        {
            // Save only properties that differ from default values
            SimplePreferenceStore prefStore = dataSource.getPreferenceStore();
            for (String propName : prefStore.preferenceNames()) {
                String propValue = prefStore.getString(propName);
                String defValue = prefStore.getDefaultString(propName);
                // NOTE(review): defValue is computed but never compared against propValue, so every
                // non-null property is written - presumably the intent was to skip values equal to
                // their default; verify against the parser/other registry implementations.
                if (propValue == null) {
                    continue;
                }
                xml.startElement(RegistryConstants.TAG_CUSTOM_PROPERTY);
                xml.addAttribute(RegistryConstants.ATTR_NAME, propName);
                xml.addAttribute(RegistryConstants.ATTR_VALUE, propValue);
                xml.endElement();
            }
        }

        //xml.addText(CommonUtils.getString(dataSource.getDescription()));
        xml.endElement();
    }

    // Writes one <filter> element with its include/exclude children.
    // (Method name "saveObjectFiler" is a historical typo kept for compatibility.)
    private void saveObjectFiler(XMLBuilder xml, String typeName, String objectID, DBSObjectFilter filter)
        throws IOException {
        xml.startElement(RegistryConstants.TAG_FILTER);
        xml.addAttribute(RegistryConstants.ATTR_TYPE, typeName);
        if (objectID != null) {
            xml.addAttribute(RegistryConstants.ATTR_ID, objectID);
        }
        if (!CommonUtils.isEmpty(filter.getName())) {
            xml.addAttribute(RegistryConstants.ATTR_NAME, filter.getName());
        }
        if (!CommonUtils.isEmpty(filter.getDescription())) {
            xml.addAttribute(RegistryConstants.ATTR_DESCRIPTION, filter.getDescription());
        }
        if (!filter.isEnabled()) {
            xml.addAttribute(RegistryConstants.ATTR_ENABLED, false);
        }
        for (String include : CommonUtils.safeCollection(filter.getInclude())) {
            xml.startElement(RegistryConstants.TAG_INCLUDE);
            xml.addAttribute(RegistryConstants.ATTR_NAME, include);
            xml.endElement();
        }
        for (String exclude : CommonUtils.safeCollection(filter.getExclude())) {
            xml.startElement(RegistryConstants.TAG_EXCLUDE);
            xml.addAttribute(RegistryConstants.ATTR_NAME, exclude);
            xml.endElement();
        }
        xml.endElement();
    }

    @Override
    public IProject getProject() {
        return project;
    }

    // Accumulates the outcome of a (re)load pass: which descriptors were updated vs. newly added.
    private static class ParseResults {
        Set<DataSourceDescriptor> updatedDataSources = new HashSet<>();
        Set<DataSourceDescriptor> addedDataSources = new HashSet<>();
    }

    // SAX handler that materializes DataSourceDescriptor objects from the config XML.
    private class DataSourcesParser implements SAXListener {
        DataSourceDescriptor curDataSource;
        boolean extraConfig;
        boolean refresh;
        PasswordEncrypter encrypter;
        boolean isDescription = false;
        DBRShellCommand curCommand = null;
        private DBWHandlerConfiguration curNetworkHandler;
        private DBSObjectFilter curFilter;
        private StringBuilder curQuery;
        private ParseResults parseResults;

        private DataSourcesParser(boolean extraConfig, boolean refresh, ParseResults parseResults, PasswordEncrypter encrypter) {
            this.extraConfig = extraConfig;
            this.refresh = refresh;
            this.parseResults = parseResults;
            this.encrypter = encrypter;
        }

        @Override
        public void saxStartElement(SAXReader reader, String namespaceURI, String localName, Attributes atts)
            throws XMLException {
            isDescription = false;
            curCommand = null;
            switch (localName) {
                case RegistryConstants.TAG_DATA_SOURCE: {
                    String name = atts.getValue(RegistryConstants.ATTR_NAME);
                    String id = atts.getValue(RegistryConstants.ATTR_ID);
                    if (id == null) {
                        // Support of old version without ID
                        id = name;
                    }
                    String providerId = atts.getValue(RegistryConstants.ATTR_PROVIDER);
                    DataSourceProviderDescriptor provider = DataSourceProviderRegistry.getInstance().getDataSourceProvider(providerId);
                    if (provider == null) {
                        // Unknown provider: skip the whole element subtree.
                        log.warn("Can't find datasource provider " + providerId + " for datasource '" + name + "'");
                        curDataSource = null;
                        reader.setListener(EMPTY_LISTENER);
                        return;
                    }
                    String driverId = atts.getValue(RegistryConstants.ATTR_DRIVER);
                    DriverDescriptor driver = provider.getDriver(driverId);
                    if (driver == null) {
                        // Missing driver: register a placeholder so the data source stays usable.
                        log.warn("Can't find driver " + driverId + " in datasource provider " + provider.getId() + " for datasource '" + name + "'. Create new driver");
                        driver = provider.createDriver(driverId);
                        provider.addDriver(driver);
                    }
                    curDataSource = getDataSource(id);
                    boolean newDataSource = (curDataSource == null);
                    if (newDataSource) {
                        curDataSource = new DataSourceDescriptor(
                            DataSourceRegistry.this,
                            id,
                            driver,
                            new DBPConnectionConfiguration());
                    }
                    if (extraConfig) {
                        curDataSource.setProvided(true);
                    }
                    curDataSource.setName(name);
                    String createDate = atts.getValue(RegistryConstants.ATTR_CREATE_DATE);
                    if (!CommonUtils.isEmpty(createDate)) {
                        curDataSource.setCreateDate(new Date(Long.parseLong(createDate)));
                    }
                    String updateDate = atts.getValue(RegistryConstants.ATTR_UPDATE_DATE);
                    if (!CommonUtils.isEmpty(updateDate)) {
                        curDataSource.setUpdateDate(new Date(Long.parseLong(updateDate)));
                    }
                    String loginDate = atts.getValue(RegistryConstants.ATTR_LOGIN_DATE);
                    if (!CommonUtils.isEmpty(loginDate)) {
                        curDataSource.setLoginDate(new Date(Long.parseLong(loginDate)));
                    }
                    curDataSource.setSavePassword(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_SAVE_PASSWORD)));
                    curDataSource.setShowSystemObjects(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_SHOW_SYSTEM_OBJECTS)));
                    curDataSource.setConnectionReadOnly(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_READ_ONLY)));
                    curDataSource.setFolderPath(atts.getValue(RegistryConstants.ATTR_FOLDER));
                    {
                        // Legacy filter settings
                        String legacyCatalogFilter = atts.getValue(RegistryConstants.ATTR_FILTER_CATALOG);
                        if (!CommonUtils.isEmpty(legacyCatalogFilter)) {
                            curDataSource.updateObjectFilter(DBSCatalog.class.getName(), null, new DBSObjectFilter(legacyCatalogFilter, null));
                        }
                        String legacySchemaFilter = atts.getValue(RegistryConstants.ATTR_FILTER_SCHEMA);
                        if (!CommonUtils.isEmpty(legacySchemaFilter)) {
                            curDataSource.updateObjectFilter(DBSSchema.class.getName(), null, new DBSObjectFilter(legacySchemaFilter, null));
                        }
                    }
                    if
(newDataSource) { dataSources.add(curDataSource); parseResults.addedDataSources.add(curDataSource); } else { parseResults.updatedDataSources.add(curDataSource); } break; } case RegistryConstants.TAG_CONNECTION: if (curDataSource != null) { DriverDescriptor driver = curDataSource.getDriver(); if (CommonUtils.isEmpty(driver.getName())) { // Broken driver - seems to be just created driver.setName(atts.getValue(RegistryConstants.ATTR_URL)); driver.setDriverClassName("java.sql.Driver"); } DBPConnectionConfiguration config = curDataSource.getConnectionConfiguration(); config.setHostName(atts.getValue(RegistryConstants.ATTR_HOST)); config.setHostPort(atts.getValue(RegistryConstants.ATTR_PORT)); config.setServerName(atts.getValue(RegistryConstants.ATTR_SERVER)); config.setDatabaseName(atts.getValue(RegistryConstants.ATTR_DATABASE)); config.setUrl(atts.getValue(RegistryConstants.ATTR_URL)); config.setUserName(atts.getValue(RegistryConstants.ATTR_USER)); config.setUserPassword(decryptPassword(atts.getValue(RegistryConstants.ATTR_PASSWORD))); config.setClientHomeId(atts.getValue(RegistryConstants.ATTR_HOME)); config.setConnectionType( DataSourceProviderRegistry.getInstance().getConnectionType( CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_TYPE)), DBPConnectionType.DEFAULT_TYPE) ); String colorValue = atts.getValue(RegistryConstants.ATTR_COLOR); if (!CommonUtils.isEmpty(colorValue)) { config.setConnectionColor(colorValue); } } break; case RegistryConstants.TAG_BOOTSTRAP: if (curDataSource != null) { DBPConnectionConfiguration config = curDataSource.getConnectionConfiguration(); if (atts.getValue(RegistryConstants.ATTR_AUTOCOMMIT) != null) { config.getBootstrap().setDefaultAutoCommit(CommonUtils.toBoolean(atts.getValue(RegistryConstants.ATTR_AUTOCOMMIT))); } if (atts.getValue(RegistryConstants.ATTR_TXN_ISOLATION) != null) { config.getBootstrap().setDefaultTransactionIsolation(CommonUtils.toInt(atts.getValue(RegistryConstants.ATTR_TXN_ISOLATION))); } if 
(!CommonUtils.isEmpty(atts.getValue(RegistryConstants.ATTR_DEFAULT_OBJECT))) { config.getBootstrap().setDefaultObjectName(atts.getValue(RegistryConstants.ATTR_DEFAULT_OBJECT)); } if (atts.getValue(RegistryConstants.ATTR_IGNORE_ERRORS) != null) { config.getBootstrap().setIgnoreErrors(CommonUtils.toBoolean(atts.getValue(RegistryConstants.ATTR_IGNORE_ERRORS))); } } break; case RegistryConstants.TAG_QUERY: curQuery = new StringBuilder(); break; case RegistryConstants.TAG_PROPERTY: if (curNetworkHandler != null) { curNetworkHandler.getProperties().put( atts.getValue(RegistryConstants.ATTR_NAME), atts.getValue(RegistryConstants.ATTR_VALUE)); } else if (curDataSource != null) { curDataSource.getConnectionConfiguration().setProperty( atts.getValue(RegistryConstants.ATTR_NAME), atts.getValue(RegistryConstants.ATTR_VALUE)); } break; case RegistryConstants.TAG_EVENT: if (curDataSource != null) { DBPConnectionEventType eventType = DBPConnectionEventType.valueOf(atts.getValue(RegistryConstants.ATTR_TYPE)); curCommand = new DBRShellCommand(""); curCommand.setEnabled(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_ENABLED))); curCommand.setShowProcessPanel(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_SHOW_PANEL))); curCommand.setWaitProcessFinish(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_WAIT_PROCESS))); curCommand.setTerminateAtDisconnect(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_TERMINATE_AT_DISCONNECT))); curDataSource.getConnectionConfiguration().setEvent(eventType, curCommand); } break; case RegistryConstants.TAG_CUSTOM_PROPERTY: if (curDataSource != null) { String propName = atts.getValue(RegistryConstants.ATTR_NAME); String propValue = atts.getValue(RegistryConstants.ATTR_VALUE); // TODO: remove bootstrap preferences later. 
PResent for config backward compatibility if (propName.equals(DEFAULT_AUTO_COMMIT)) { curDataSource.getConnectionConfiguration().getBootstrap().setDefaultAutoCommit(CommonUtils.toBoolean(propValue)); } else if (propName.equals(DEFAULT_ISOLATION)) { curDataSource.getConnectionConfiguration().getBootstrap().setDefaultTransactionIsolation(CommonUtils.toInt(propValue)); } else if (propName.equals(DEFAULT_ACTIVE_OBJECT)) { if (!CommonUtils.isEmpty(propValue)) { curDataSource.getConnectionConfiguration().getBootstrap().setDefaultObjectName(propValue); } } else { curDataSource.getPreferenceStore().getProperties().put(propName, propValue); } } break; case RegistryConstants.TAG_NETWORK_HANDLER: if (curDataSource != null) { String handlerId = atts.getValue(RegistryConstants.ATTR_ID); NetworkHandlerDescriptor handlerDescriptor = NetworkHandlerRegistry.getInstance().getDescriptor(handlerId); if (handlerDescriptor == null) { log.warn("Can't find network handler '" + handlerId + "'"); reader.setListener(EMPTY_LISTENER); return; } curNetworkHandler = new DBWHandlerConfiguration(handlerDescriptor, curDataSource.getDriver()); curNetworkHandler.setEnabled(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_ENABLED))); curNetworkHandler.setUserName(CommonUtils.notEmpty(atts.getValue(RegistryConstants.ATTR_USER))); curNetworkHandler.setSavePassword(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_SAVE_PASSWORD))); curNetworkHandler.setPassword(decryptPassword(atts.getValue(RegistryConstants.ATTR_PASSWORD))); curDataSource.getConnectionConfiguration().addHandler(curNetworkHandler); } break; case RegistryConstants.TAG_FILTER: if (curDataSource != null) { String typeName = atts.getValue(RegistryConstants.ATTR_TYPE); String objectID = atts.getValue(RegistryConstants.ATTR_ID); if (typeName != null) { curFilter = new DBSObjectFilter(); curFilter.setName(atts.getValue(RegistryConstants.ATTR_NAME)); 
curFilter.setDescription(atts.getValue(RegistryConstants.ATTR_DESCRIPTION)); curFilter.setEnabled(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_ENABLED), true)); curDataSource.updateObjectFilter(typeName, objectID, curFilter); } } break; case RegistryConstants.TAG_INCLUDE: if (curFilter != null) { curFilter.addInclude(CommonUtils.notEmpty(atts.getValue(RegistryConstants.ATTR_NAME))); } break; case RegistryConstants.TAG_EXCLUDE: if (curFilter != null) { curFilter.addExclude(CommonUtils.notEmpty(atts.getValue(RegistryConstants.ATTR_NAME))); } break; case RegistryConstants.TAG_DESCRIPTION: isDescription = true; break; case RegistryConstants.TAG_VIRTUAL_META_DATA: if (curDataSource != null) { reader.setListener(curDataSource.getVirtualModel().getModelParser()); } break; } } @Override public void saxText(SAXReader reader, String data) throws XMLException { if (isDescription && curDataSource != null) { curDataSource.setDescription(data); } else if (curCommand != null) { curCommand.setCommand(data); curCommand = null; } else if (curQuery != null) { curQuery.append(data); } } @Override public void saxEndElement(SAXReader reader, String namespaceURI, String localName) throws XMLException { switch (localName) { case RegistryConstants.TAG_DATA_SOURCE: curDataSource = null; break; case RegistryConstants.TAG_NETWORK_HANDLER: curNetworkHandler = null; break; case RegistryConstants.TAG_FILTER: curFilter = null; break; case RegistryConstants.TAG_QUERY: if (curDataSource != null && curQuery != null && curQuery.length() > 0) { curDataSource.getConnectionConfiguration().getBootstrap().getInitQueries().add(curQuery.toString()); curQuery = null; } break; } isDescription = false; } @Nullable private String decryptPassword(String encPassword) { if (!CommonUtils.isEmpty(encPassword)) { try { encPassword = encrypter.decrypt(encPassword); } catch (Throwable e) { // could not decrypt - use as is encPassword = null; } } return encPassword; } } private class DisconnectTask 
implements DBRRunnableWithProgress { boolean disconnected; @Override public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException { List<DataSourceDescriptor> dsSnapshot; synchronized (dataSources) { dsSnapshot = CommonUtils.copyList(dataSources); } monitor.beginTask("Disconnect all databases", dsSnapshot.size()); try { for (DataSourceDescriptor dataSource : dsSnapshot) { if (monitor.isCanceled()) { break; } if (dataSource.isConnected()) { try { // Disconnect monitor.subTask("Disconnect from [" + dataSource.getName() + "]"); disconnected = dataSource.disconnect(monitor); } catch (Exception ex) { log.error("Can't shutdown data source '" + dataSource.getName() + "'", ex); } } monitor.worked(1); } } finally { monitor.done(); } } } }
package com.theoryinpractice.testng.configuration; import com.intellij.ExtensionPoints; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.coverage.CoverageDataManager; import com.intellij.coverage.CoverageSuite; import com.intellij.coverage.IDEACoverageRunner; import com.intellij.debugger.engine.DebuggerUtils; import com.intellij.execution.*; import com.intellij.execution.configurations.*; import com.intellij.execution.process.ProcessAdapter; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.runners.ProgramRunner; import com.intellij.execution.testframework.TestFrameworkRunningModel; import com.intellij.execution.testframework.TestSearchScope; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.execution.util.JavaParametersUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.module.LanguageLevelUtil; import com.intellij.openapi.module.Module; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.ex.JavaSdkUtil; import com.intellij.openapi.roots.LanguageLevelProjectExtension; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Getter; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.io.FileUtil; import 
com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.ToolWindowId; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.PackageScope; import com.intellij.psi.search.searches.AllClassesSearch; import com.intellij.psi.search.searches.AnnotatedMembersSearch; import com.intellij.util.PathUtil; import com.theoryinpractice.testng.model.*; import com.theoryinpractice.testng.ui.TestNGConsoleView; import com.theoryinpractice.testng.ui.TestNGResults; import com.theoryinpractice.testng.ui.actions.RerunFailedTestsAction; import com.theoryinpractice.testng.util.TestNGUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.testng.IDEACoverageListener; import org.testng.TestNG; import org.testng.TestNGCommandLineArgs; import org.testng.annotations.AfterClass; import org.testng.remote.strprotocol.MessageHelper; import org.testng.xml.LaunchSuite; import org.testng.xml.Parser; import org.testng.xml.SuiteGenerator; import org.testng.xml.XmlSuite; import javax.swing.*; import java.io.*; import java.net.InetAddress; import java.net.ServerSocket; import java.net.UnknownHostException; import java.util.*; public class TestNGRunnableState extends JavaCommandLineState { private static final Logger LOG = Logger.getInstance("TestNG Runner"); private final ConfigurationPerRunnerSettings myConfigurationPerRunnerSettings; private final TestNGConfiguration config; private final RunnerSettings runnerSettings; private final IDEARemoteTestRunnerClient client; private int port; private String debugPort; private CoverageSuite myCurrentCoverageSuite; private File myTempFile; public TestNGRunnableState(ExecutionEnvironment environment, TestNGConfiguration config) { super(environment); this.runnerSettings = 
environment.getRunnerSettings(); myConfigurationPerRunnerSettings = environment.getConfigurationSettings(); this.config = config; //TODO need to narrow this down a bit //setModulesToCompile(ModuleManager.getInstance(config.getProject()).getModules()); client = new IDEARemoteTestRunnerClient(); // Want debugging? if (runnerSettings.getData() instanceof DebuggingRunnerData) { DebuggingRunnerData debuggingRunnerData = ((DebuggingRunnerData)runnerSettings.getData()); debugPort = debuggingRunnerData.getDebugPort(); if (debugPort.length() == 0) { try { debugPort = DebuggerUtils.getInstance().findAvailableDebugAddress(true); } catch (ExecutionException e) { LOG.error(e); } debuggingRunnerData.setDebugPort(debugPort); } debuggingRunnerData.setLocal(true); } } @Override public ExecutionResult execute(@NotNull final Executor executor, @NotNull final ProgramRunner runner) throws ExecutionException { final TestNGConsoleView console = new TestNGConsoleView(config, runnerSettings, myConfigurationPerRunnerSettings); console.initUI(); ProcessHandler processHandler = startProcess(); processHandler.addProcessListener(new ProcessAdapter() { @Override public void processTerminated(final ProcessEvent event) { client.stopTest(); if (myCurrentCoverageSuite != null) { CoverageDataManager coverageDataManager = CoverageDataManager.getInstance(config.getProject()); coverageDataManager.coverageGathered(myCurrentCoverageSuite); } SwingUtilities.invokeLater(new Runnable() { public void run() { final Project project = config.getProject(); if (project.isDisposed()) return; final TestFrameworkRunningModel model = console.getModel(); final TestNGResults resultsView = console.getResultsView(); final String testRunDebugId = console.getProperties().isDebug() ? 
ToolWindowId.DEBUG : ToolWindowId.RUN; final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(project); if (!Comparing.strEqual(toolWindowManager.getActiveToolWindowId(), testRunDebugId)) { toolWindowManager.notifyByBalloon(testRunDebugId, model == null || resultsView.getStatus() == MessageHelper.SKIPPED_TEST ? MessageType.WARNING : (resultsView.getStatus() == MessageHelper.FAILED_TEST ? MessageType.ERROR : MessageType.INFO), model == null ? "Tests were not started" : resultsView.getStatusLine(), null, null); } } }); } @Override public void startNotified(final ProcessEvent event) { TestNGRemoteListener listener = new TestNGRemoteListener(console); client.startListening(listener, listener, port); } @Override public void processWillTerminate(ProcessEvent event, boolean willBeDestroyed) { console.getResultsView().finish(); } @Override public void onTextAvailable(final ProcessEvent event, final Key outputType) { //we override this since we wrap the underlying console, and proxy the attach call, //so we never get a chance to intercept the text. 
console.print(event.getText(), ConsoleViewContentType.getConsoleViewType(outputType)); } }); console.attachToProcess(processHandler); RerunFailedTestsAction rerunFailedTestsAction = new RerunFailedTestsAction(console.getComponent()); rerunFailedTestsAction.init(console.getProperties(), runnerSettings, myConfigurationPerRunnerSettings); rerunFailedTestsAction.setModelProvider(new Getter<TestFrameworkRunningModel>() { public TestFrameworkRunningModel get() { return console.getModel(); } }); final DefaultExecutionResult result = new DefaultExecutionResult(console, processHandler); result.setRestartActions(rerunFailedTestsAction); return result; } @Override protected JavaParameters createJavaParameters() throws ExecutionException { final Project project = config.getProject(); final JavaParameters javaParameters = new JavaParameters(); javaParameters.setupEnvs(config.getPersistantData().getEnvs(), config.getPersistantData().PASS_PARENT_ENVS); javaParameters.getVMParametersList().add("-ea"); javaParameters.setMainClass("org.testng.RemoteTestNGStarter"); javaParameters.setWorkingDirectory(config.getProperty(RunJavaConfiguration.WORKING_DIRECTORY_PROPERTY)); javaParameters.getClassPath().add(PathUtil.getJarPathForClass(IDEACoverageListener.class)); //the next few lines are awkward for a reason, using compareTo for some reason causes a JVM class verification error! Module module = config.getConfigurationModule().getModule(); LanguageLevel effectiveLanguageLevel = module == null ? LanguageLevelProjectExtension.getInstance(project).getLanguageLevel() : LanguageLevelUtil.getEffectiveLanguageLevel(module); final boolean is15 = effectiveLanguageLevel != LanguageLevel.JDK_1_4 && effectiveLanguageLevel != LanguageLevel.JDK_1_3; LOG.info("Language level is " + effectiveLanguageLevel.toString()); LOG.info("is15 is " + is15); // Add plugin jars first... javaParameters.getClassPath().add(is15 ? 
PathUtil.getJarPathForClass(AfterClass.class) : //testng-jdk15.jar new File(PathManager.getPreinstalledPluginsPath(), "testng/lib-jdk14/testng-jdk14.jar") .getPath());//todo !do not hard code lib name! final boolean hasIDEACoverageEnabled = config.isCoverageEnabled() && config.getCoverageRunner() instanceof IDEACoverageRunner; // Configure rest of jars JavaParametersUtil.configureConfiguration(javaParameters, config); Sdk jdk = module == null ? ProjectRootManager.getInstance(project).getProjectJdk() : ModuleRootManager.getInstance(module).getSdk(); javaParameters.setJdk(jdk); final Object[] patchers = Extensions.getExtensions(ExtensionPoints.JUNIT_PATCHER); for (Object patcher : patchers) { ((JUnitPatcher)patcher).patchJavaParameters(module, javaParameters); } JavaSdkUtil.addRtJar(javaParameters.getClassPath()); // Append coverage parameters if appropriate if ((!(runnerSettings.getData() instanceof DebuggingRunnerData) || config.getCoverageRunner() instanceof IDEACoverageRunner) && config.isCoverageEnabled()) { myCurrentCoverageSuite = CoverageDataManager.getInstance(project).addCoverageSuite(config); LOG.info("Added coverage data with name '" + myCurrentCoverageSuite.getPresentableName() + "'"); config.appendCoverageArgument(javaParameters); } LOG.info("Test scope is: " + config.getPersistantData().getScope()); if (config.getPersistantData().getScope() == TestSearchScope.WHOLE_PROJECT) { LOG.info("Configuring for whole project"); JavaParametersUtil.configureProject(config.getProject(), javaParameters, JavaParameters.JDK_AND_CLASSES_AND_TESTS, config.ALTERNATIVE_JRE_PATH_ENABLED ? config.ALTERNATIVE_JRE_PATH : null); } else { LOG.info("Configuring for module:" + config.getConfigurationModule().getModuleName()); JavaParametersUtil.configureModule(config.getConfigurationModule(), javaParameters, JavaParameters.JDK_AND_CLASSES_AND_TESTS, config.ALTERNATIVE_JRE_PATH_ENABLED ? 
config.ALTERNATIVE_JRE_PATH : null); } calculateServerPort(); final TestData data = config.getPersistantData(); javaParameters.getProgramParametersList().add(TestNGCommandLineArgs.PORT_COMMAND_OPT, String.valueOf(port)); if (!is15) { javaParameters.getProgramParametersList().add(TestNGCommandLineArgs.ANNOTATIONS_COMMAND_OPT, "javadoc"); } if (data.getOutputDirectory() != null && !"".equals(data.getOutputDirectory())) { javaParameters.getProgramParametersList().add(TestNGCommandLineArgs.OUTDIR_COMMAND_OPT, data.getOutputDirectory()); } @NonNls final StringBuilder buf = new StringBuilder(); if (data.TEST_LISTENERS != null && !data.TEST_LISTENERS.isEmpty()) { buf.append(StringUtil.join(data.TEST_LISTENERS, ";")); } if (hasIDEACoverageEnabled) { if (buf.length() > 0) buf.append(";"); buf.append(IDEACoverageListener.class.getName()); } if (buf.length() > 0) javaParameters.getProgramParametersList().add(TestNGCommandLineArgs.LISTENER_COMMAND_OPT, buf.toString()); // Always include the source paths - just makes things easier :) VirtualFile[] sources; if ((data.getScope() == TestSearchScope.WHOLE_PROJECT && TestType.PACKAGE.getType().equals(data.TEST_OBJECT)) || module == null) { sources = ProjectRootManager.getInstance(project).getContentSourceRoots(); } else { sources = ModuleRootManager.getInstance(module).getSourceRoots(); } if (sources.length > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < sources.length; i++) { VirtualFile source = sources[i]; sb.append(source.getPath()); if (i < sources.length - 1) { sb.append(';'); } } javaParameters.getProgramParametersList().add(TestNGCommandLineArgs.SRC_COMMAND_OPT, sb.toString()); } final Runnable runnable = new Runnable() { public void run() { Map<PsiClass, Collection<PsiMethod>> classes = new HashMap<PsiClass, Collection<PsiMethod>>(); try { fillTestObjects(classes, project, is15); //if we have testclasses, then we're not running a suite and we have to create one //LaunchSuite suite = null; //if(testPackage 
!= null) { // List<String> packages = new ArrayList<String>(1); // packages.add(testPackage.getQualifiedName()); // suite = SuiteGenerator.createCustomizedSuite(config.project.getName(), packages, null, null, null, data.TEST_PROPERTIES, is15 ? null : "javadoc", 0); //} else if (classes.size() > 0) { Map<String, Collection<String>> map = new HashMap<String, Collection<String>>(); for (final Map.Entry<PsiClass, Collection<PsiMethod>> entry : classes.entrySet()) { Collection<String> methods = new HashSet<String>(entry.getValue().size()); for (PsiMethod method : entry.getValue()) { methods.add(method.getName()); } map.put(ApplicationManager.getApplication().runReadAction( new Computable<String>() { @Nullable public String compute() { return entry.getKey().getQualifiedName(); } } ), methods); } // We have groups we wish to limit to. Collection<String> groupNames = null; if (TestType.GROUP.getType().equals(data.TEST_OBJECT)) { String groupName = data.getGroupName(); if (groupName != null && groupName.length() > 0) { groupNames = new HashSet<String>(1); groupNames.add(groupName); } } Map<String, String> testParams = buildTestParameters(data); String annotationType = data.ANNOTATION_TYPE; if (annotationType == null || "".equals(annotationType)) { annotationType = is15 ? 
TestNG.JDK_ANNOTATION_TYPE : TestNG.JAVADOC_ANNOTATION_TYPE; } LOG.info("Using annotationType of " + annotationType); int logLevel = 1; try { final Properties properties = new Properties(); properties.load(new ByteArrayInputStream(config.getPersistantData().VM_PARAMETERS.getBytes())); final String verbose = properties.getProperty("-Dtestng.verbose"); if (verbose != null) { logLevel = Integer.parseInt(verbose); } } catch (Exception e) { //not a number logLevel = 1; } LaunchSuite suite = SuiteGenerator.createSuite(project.getName(), null, map, groupNames, testParams, annotationType, logLevel); File xmlFile = suite.save(new File(PathManager.getSystemPath())); String path = xmlFile.getAbsolutePath() + "\n"; try { FileUtil.writeToFile(myTempFile, path.getBytes(), true); } catch (IOException e) { LOG.error(e); } } else if (TestType.SUITE.getType().equals(data.TEST_OBJECT)) { // Running a suite, make a local copy of the suite and apply our custom parameters to it and run that instead. try { Collection<XmlSuite> suites = new Parser(data.getSuiteName()).parse(); for (XmlSuite suite : suites) { Map<String, String> params = suite.getParameters(); params.putAll(buildTestParameters(data)); String annotationType = data.ANNOTATION_TYPE; if (annotationType != null && !"".equals(annotationType)) { suite.setAnnotations(annotationType); } LOG.info("Using annotationType of " + annotationType); final String fileId = (project.getName() + '_' + suite.getName() + '_' + Integer.toHexString(suite.getName().hashCode()) + ".xml") .replace(' ', '_'); final File suiteFile = new File(PathManager.getSystemPath(), fileId); FileWriter fileWriter = new FileWriter(suiteFile); try { fileWriter.write(suite.toXml()); } finally { fileWriter.close(); } String path = suiteFile.getAbsolutePath() + "\n"; FileUtil.writeToFile(myTempFile, path.getBytes(), true); } } catch (Exception e) { throw new CantRunException("Unable to parse suite: " + e.getMessage()); } } try { FileUtil.writeToFile(myTempFile, 
"end".getBytes(), true); } catch (IOException e) { LOG.error(e); } } catch (CantRunException e) { try { final String message = "CantRunException" + e.getMessage(); FileUtil.writeToFile(myTempFile, message.getBytes()); } catch (IOException e1) { LOG.error(e1); } } } }; try { myTempFile = File.createTempFile("idea_testng", ".tmp"); myTempFile.deleteOnExit(); javaParameters.getProgramParametersList().add("-temp", myTempFile.getAbsolutePath()); ProgressManager.getInstance().run(new Task.Backgroundable(project, "Searching For Tests ...", true) { public void run(@NotNull ProgressIndicator indicator) { runnable.run(); } }); } catch (IOException e) { LOG.error(e); } // Configure for debugging if (runnerSettings.getData() instanceof DebuggingRunnerData) { ParametersList params = javaParameters.getVMParametersList(); String hostname = "localhost"; try { hostname = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { } params.add("-Xdebug"); params.add("-Xrunjdwp:transport=dt_socket,address=" + hostname + ':' + debugPort + ",suspend=y,server=n"); // params.add(debugPort); } return javaParameters; } protected void fillTestObjects(final Map<PsiClass, Collection<PsiMethod>> classes, final Project project, boolean is15) throws CantRunException { final TestData data = config.getPersistantData(); final PsiManager psiManager = PsiManager.getInstance(project); if (data.TEST_OBJECT.equals(TestType.PACKAGE.getType())) { final String packageName = data.getPackageName(); PsiPackage psiPackage = ApplicationManager.getApplication().runReadAction( new Computable<PsiPackage>() { @Nullable public PsiPackage compute() { return JavaPsiFacade.getInstance(psiManager.getProject()).findPackage(packageName); } } ); if (psiPackage == null) { throw CantRunException.packageNotFound(packageName); } else { TestSearchScope scope = config.getPersistantData().getScope(); //TODO we should narrow this down by module really, if that's what's specified TestClassFilter projectFilter = new 
TestClassFilter(scope.getSourceScope(config).getGlobalSearchScope(), config.getProject(), true); TestClassFilter filter = projectFilter.intersectionWith(PackageScope.packageScope(psiPackage, true)); classes.putAll(calculateDependencies(null, is15, TestNGUtil.getAllTestClasses(filter, false))); if (classes.size() == 0) { throw new CantRunException("No tests found in the package \"" + packageName + '\"'); } } } else if (data.TEST_OBJECT.equals(TestType.CLASS.getType())) { //it's a class final PsiClass psiClass = ApplicationManager.getApplication().runReadAction( new Computable<PsiClass>() { @Nullable public PsiClass compute() { return JavaPsiFacade.getInstance(psiManager.getProject()).findClass(data.getMainClassName(), getSearchScope()); } } ); if (psiClass == null) { throw new CantRunException("No tests found in the class \"" + data.getMainClassName() + '\"'); } if (null == ApplicationManager.getApplication().runReadAction(new Computable<String>() { @Nullable public String compute() { return psiClass.getQualifiedName(); } })) { throw new CantRunException("Cannot test anonymous or local class \"" + data.getMainClassName() + '\"'); } classes.putAll(calculateDependencies(null, is15, psiClass)); } else if (data.TEST_OBJECT.equals(TestType.METHOD.getType())) { //it's a method final PsiClass psiClass = ApplicationManager.getApplication().runReadAction( new Computable<PsiClass>() { @Nullable public PsiClass compute() { return JavaPsiFacade.getInstance(psiManager.getProject()).findClass(data.getMainClassName(), getSearchScope()); } } ); if (psiClass == null) { throw new CantRunException("No tests found in the class \"" + data.getMainClassName() + '\"'); } if (null == ApplicationManager.getApplication().runReadAction( new Computable<String>() { @Nullable public String compute() { return psiClass.getQualifiedName(); } } )) { throw new CantRunException("Cannot test anonymous or local class \"" + data.getMainClassName() + '\"'); } final PsiMethod[] methods = 
ApplicationManager.getApplication().runReadAction( new Computable<PsiMethod[]>() { public PsiMethod[] compute() { return psiClass.findMethodsByName(data.getMethodName(), true); } } ); classes.putAll(calculateDependencies(methods, is15, psiClass)); Collection<PsiMethod> psiMethods = classes.get(psiClass); if (psiMethods == null) { psiMethods = new LinkedHashSet<PsiMethod>(); classes.put(psiClass, psiMethods); } psiMethods.addAll(Arrays.asList(methods)); } else if (data.TEST_OBJECT.equals(TestType.GROUP.getType())) { //for a group, we include all classes PsiClass[] testClasses = TestNGUtil .getAllTestClasses(new TestClassFilter(data.getScope().getSourceScope(config).getGlobalSearchScope(), project, true), false); for (PsiClass c : testClasses) { classes.put(c, new HashSet<PsiMethod>()); } } } private static Map<String, String> buildTestParameters(TestData data) { Map<String, String> testParams = new HashMap<String, String>(); // Override with those from the test runner configuration testParams.putAll(convertPropertiesFileToMap(data.PROPERTIES_FILE)); testParams.putAll(data.TEST_PROPERTIES); return testParams; } private static Map<String, String> convertPropertiesFileToMap(String properties_file) { Map<String, String> params = new HashMap<String, String>(); if (properties_file != null) { File propertiesFile = new File(properties_file); if (propertiesFile.exists()) { Properties properties = new Properties(); try { properties.load(new FileInputStream(propertiesFile)); for (Map.Entry entry : properties.entrySet()) { params.put((String) entry.getKey(), (String) entry.getValue()); } } catch (IOException e) { LOG.error(e); } } } return params; } private void calculateServerPort() throws ExecutionException { port = 5000; int counter = 0; IOException exception = null; ServerSocket socket = null; while (counter++ < 10) { try { socket = new ServerSocket(port); break; } catch (IOException ex) { //we keep trying exception = ex; port = 5000 + (int) (Math.random() * 5000); } 
        finally {
          // Close the probe socket either way: we only needed to verify the port is free.
          if (socket != null) {
            try {
              socket.close();
            }
            catch (IOException e) {
            }
          }
        }
      }
      if (socket == null) {
        throw new ExecutionException("Unable to bind to port " + port, exception);
      }
    }

    /**
     * Computes the classes/methods to run including TestNG group dependencies: any class whose
     * "groups" annotation value intersects the "dependsOnGroups" values of the given
     * methods/classes is added to the result.
     *
     * @param methods methods the user selected, or {@code null} when running whole classes
     * @param is15    JDK 1.5+ target: when true, dependencies are resolved via the @Test
     *                annotation; otherwise via a full-scope class search
     * @return map of test class to the methods to run in it (empty collection = whole class)
     */
    private Map<PsiClass, Collection<PsiMethod>> calculateDependencies(PsiMethod[] methods,
                                                                       final boolean is15,
                                                                       @Nullable final PsiClass... classes) {
      //we build up a list of dependencies
      final Map<PsiClass, Collection<PsiMethod>> results = new HashMap<PsiClass, Collection<PsiMethod>>();
      if (classes != null && classes.length > 0) {
        final Set<String> dependencies = new HashSet<String>();
        TestNGUtil.collectAnnotationValues(dependencies, "dependsOnGroups", methods, classes);
        // PSI traversal below must run inside a read action.
        ApplicationManager.getApplication().runReadAction(new Runnable() {
          public void run(){
            if (!dependencies.isEmpty()) {
              final Project project = classes[0].getProject();
              //we get all classes in the module to figure out which are in the groups we depend on
              Collection<PsiClass> allClasses;
              if (!is15) {
                allClasses = AllClassesSearch.search(getSearchScope(), project).findAll();
                Map<PsiClass, Collection<PsiMethod>> filteredClasses = TestNGUtil.filterAnnotations("groups", dependencies, allClasses);
                //we now have a list of dependencies, and a list of classes that match those dependencies
                results.putAll(filteredClasses);
              }
              else {
                // 1.5+: walk everything annotated with @Test and keep members whose "groups"
                // parameter matches one of the required group names.
                final PsiClass testAnnotation =
                    JavaPsiFacade.getInstance(project).findClass(TestNGUtil.TEST_ANNOTATION_FQN, GlobalSearchScope.allScope(project));
                LOG.assertTrue(testAnnotation != null);
                for (PsiMember psiMember : AnnotatedMembersSearch.search(testAnnotation, getSearchScope())) {
                  if (TestNGUtil
                      .isAnnotatedWithParameter(AnnotationUtil.findAnnotation(psiMember, TestNGUtil.TEST_ANNOTATION_FQN), "groups", dependencies)) {
                    final PsiClass psiClass = psiMember instanceof PsiClass ?
                                              ((PsiClass)psiMember) : psiMember.getContainingClass();
                    Collection<PsiMethod> psiMethods = results.get(psiClass);
                    if (psiMethods == null) {
                      psiMethods = new LinkedHashSet<PsiMethod>();
                      results.put(psiClass, psiMethods);
                    }
                    if (psiMember instanceof PsiMethod) {
                      psiMethods.add((PsiMethod)psiMember);
                    }
                  }
                }
              }
            }
          }
        });
        // No explicit methods selected: run every given class in full.
        if (methods == null) {
          for (PsiClass c : classes) {
            results.put(c, new LinkedHashSet<PsiMethod>());
          }
        }
      }
      return results;
    }

    /**
     * Search scope for locating test classes: the configured source scope for package runs,
     * otherwise the configuration module (with dependencies) or the whole project.
     */
    private GlobalSearchScope getSearchScope() {
      final TestData data = config.getPersistantData();
      final Module module = config.getConfigurationModule().getModule();
      return data.TEST_OBJECT.equals(TestType.PACKAGE.getType())
             ? config.getPersistantData().getScope().getSourceScope(config).getGlobalSearchScope()
             : module != null ? GlobalSearchScope.moduleWithDependenciesScope(module) : GlobalSearchScope.projectScope(config.getProject());
    }
  }
package org.batfish.datamodel;

import static java.util.Comparator.naturalOrder;
import static java.util.Comparator.nullsFirst;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Comparators;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.SortedSet;
import javax.annotation.Nullable;

/**
 * A constraint on IP packet-header fields (DSCP, IPs, ports, protocol, ICMP type/code, TCP
 * flags, ...). Each field has a positive set ("must match one of") and a "not" set ("must match
 * none of"); empty sets mean no constraint. See {@code matches(Flow, Map)} for the exact
 * semantics. NOTE(review): the class continues past the end of this chunk.
 */
public class HeaderSpace implements Serializable, Comparable<HeaderSpace> {

  /** Returns {@code null} when the given collection is {@code null} or empty; else the collection. */
  private static <C extends Collection<?>> C nullIfEmpty(C collection) {
    return collection == null ? null : collection.isEmpty() ? null : collection;
  }

  /** Fluent builder for {@link HeaderSpace}; collection fields default to empty (unconstrained). */
  public static class Builder {
    private SortedSet<Integer> _dscps;
    private @Nullable IpSpace _dstIps;
    private SortedSet<SubRange> _dstPorts;
    private SortedSet<Integer> _ecns;
    private SortedSet<SubRange> _fragmentOffsets;
    private SortedSet<SubRange> _icmpCodes;
    private SortedSet<SubRange> _icmpTypes;
    private SortedSet<IpProtocol> _ipProtocols;
    private boolean _negate;
    private SortedSet<Integer> _notDscps;
    private @Nullable IpSpace _notDstIps;
    private SortedSet<SubRange> _notDstPorts;
    private SortedSet<Integer> _notEcns;
    private SortedSet<SubRange> _notFragmentOffsets;
    private SortedSet<SubRange> _notIcmpCodes;
    private SortedSet<SubRange> _notIcmpTypes;
    private SortedSet<IpProtocol> _notIpProtocols;
    private SortedSet<SubRange> _notPacketLengths;
    private @Nullable IpSpace _notSrcIps;
    private SortedSet<SubRange> _notSrcPorts;
    private SortedSet<Protocol> _notSrcProtocols;
    private SortedSet<SubRange> _packetLengths;
    private @Nullable IpSpace _srcIps;
    private @Nullable IpSpace _srcOrDstIps;
    private SortedSet<SubRange> _srcOrDstPorts;
    private SortedSet<Protocol> _srcOrDstProtocols;
    private SortedSet<SubRange> _srcPorts;
    private SortedSet<Protocol> _srcProtocols;
    private List<TcpFlagsMatchConditions> _tcpFlags;

    private Builder() {
      // All collection-valued constraints start empty; IpSpace fields start null.
      _dscps = ImmutableSortedSet.of();
      _dstPorts = ImmutableSortedSet.of();
      _ecns = ImmutableSortedSet.of();
      _fragmentOffsets = ImmutableSortedSet.of();
      _icmpCodes = ImmutableSortedSet.of();
      _icmpTypes = ImmutableSortedSet.of();
      _ipProtocols = ImmutableSortedSet.of();
      _packetLengths = ImmutableSortedSet.of();
      _srcOrDstPorts = ImmutableSortedSet.of();
      _srcOrDstProtocols = ImmutableSortedSet.of();
      _srcPorts = ImmutableSortedSet.of();
      _srcProtocols = ImmutableSortedSet.of();
      _tcpFlags = ImmutableList.of();
      _notDscps = ImmutableSortedSet.of();
      _notDstPorts = ImmutableSortedSet.of();
      _notEcns = ImmutableSortedSet.of();
      _notFragmentOffsets = ImmutableSortedSet.of();
      _notIcmpCodes = ImmutableSortedSet.of();
      _notIcmpTypes = ImmutableSortedSet.of();
      _notIpProtocols = ImmutableSortedSet.of();
      _notPacketLengths = ImmutableSortedSet.of();
      _notSrcPorts = ImmutableSortedSet.of();
      _notSrcProtocols = ImmutableSortedSet.of();
    }

    /** Unions the given space into the destination-IP constraint. */
    public void addDstIp(IpSpace dstIp) {
      _dstIps = AclIpSpace.union(_dstIps, dstIp);
    }

    /** Unions the given space into the source-IP constraint. */
    public void addSrcIp(IpSpace srcIp) {
      _srcIps = AclIpSpace.union(_srcIps, srcIp);
    }

    /** Unions the given space into the excluded destination-IP constraint. */
    public void addNotDstIp(IpSpace notDstIp) {
      _notDstIps = AclIpSpace.union(_notDstIps, notDstIp);
    }

    /** Unions the given space into the excluded source-IP constraint. */
    public void addNotSrcIp(IpSpace notSrcIp) {
      _notSrcIps = AclIpSpace.union(_notSrcIps, notSrcIp);
    }

    /** Unions the given space into the source-or-destination-IP constraint. */
    public void addSrcOrDstIp(IpSpace srcOrDstIp) {
      _srcOrDstIps = AclIpSpace.union(_srcOrDstIps, srcOrDstIp);
    }

    public HeaderSpace build() {
      return new HeaderSpace(this);
    }

    public SortedSet<Integer> getDscps() {
      return _dscps;
    }

    public @Nullable IpSpace getDstIps() {
      return _dstIps;
    }

    public SortedSet<SubRange> getDstPorts() {
      return _dstPorts;
    }

    public SortedSet<Integer> getEcns() {
      return _ecns;
    }

    public SortedSet<SubRange> getFragmentOffsets() {
      return _fragmentOffsets;
    }

    public SortedSet<SubRange> getIcmpCodes() {
      return _icmpCodes;
    }
    public SortedSet<SubRange> getIcmpTypes() {
      return _icmpTypes;
    }

    public SortedSet<IpProtocol> getIpProtocols() {
      return _ipProtocols;
    }

    public boolean getNegate() {
      return _negate;
    }

    public SortedSet<Integer> getNotDscps() {
      return _notDscps;
    }

    public @Nullable IpSpace getNotDstIps() {
      return _notDstIps;
    }

    public SortedSet<SubRange> getNotDstPorts() {
      return _notDstPorts;
    }

    public SortedSet<Integer> getNotEcns() {
      return _notEcns;
    }

    public SortedSet<SubRange> getNotFragmentOffsets() {
      return _notFragmentOffsets;
    }

    public SortedSet<SubRange> getNotIcmpCodes() {
      return _notIcmpCodes;
    }

    public SortedSet<SubRange> getNotIcmpTypes() {
      return _notIcmpTypes;
    }

    public SortedSet<IpProtocol> getNotIpProtocols() {
      return _notIpProtocols;
    }

    public SortedSet<SubRange> getNotPacketLengths() {
      return _notPacketLengths;
    }

    public @Nullable IpSpace getNotSrcIps() {
      return _notSrcIps;
    }

    public SortedSet<SubRange> getNotSrcPorts() {
      return _notSrcPorts;
    }

    public SortedSet<Protocol> getNotSrcProtocols() {
      return _notSrcProtocols;
    }

    public SortedSet<SubRange> getPacketLengths() {
      return _packetLengths;
    }

    public @Nullable IpSpace getSrcIps() {
      return _srcIps;
    }

    public @Nullable IpSpace getSrcOrDstIps() {
      return _srcOrDstIps;
    }

    public SortedSet<SubRange> getSrcOrDstPorts() {
      return _srcOrDstPorts;
    }

    public SortedSet<Protocol> getSrcOrDstProtocols() {
      return _srcOrDstProtocols;
    }

    public SortedSet<SubRange> getSrcPorts() {
      return _srcPorts;
    }

    public SortedSet<Protocol> getSrcProtocols() {
      return _srcProtocols;
    }

    public List<TcpFlagsMatchConditions> getTcpFlags() {
      return _tcpFlags;
    }

    // Setters below defensively copy their input into immutable collections.

    public Builder setDscps(Iterable<Integer> dscps) {
      _dscps = ImmutableSortedSet.copyOf(dscps);
      return this;
    }

    public Builder setDstIps(Iterable<IpWildcard> dstIps) {
      _dstIps = IpWildcardSetIpSpace.builder().including(dstIps).build();
      return this;
    }

    public Builder setDstIps(IpSpace dstIps) {
      _dstIps = dstIps;
      return this;
    }

    public Builder setDstPorts(SubRange... dstPorts) {
      return setDstPorts(Arrays.asList(dstPorts));
    }

    public Builder setDstPorts(Iterable<SubRange> dstPorts) {
      _dstPorts = ImmutableSortedSet.copyOf(dstPorts);
      return this;
    }

    public Builder setEcns(Iterable<Integer> ecns) {
      _ecns = ImmutableSortedSet.copyOf(ecns);
      return this;
    }

    public Builder setFragmentOffsets(Iterable<SubRange> fragmentOffsets) {
      _fragmentOffsets = ImmutableSortedSet.copyOf(fragmentOffsets);
      return this;
    }

    public Builder setIcmpCodes(Iterable<SubRange> icmpCodes) {
      _icmpCodes = ImmutableSortedSet.copyOf(icmpCodes);
      return this;
    }

    public Builder setIcmpCodes(SubRange... icmpCodes) {
      _icmpCodes = ImmutableSortedSet.copyOf(icmpCodes);
      return this;
    }

    /** Convenience overload: each int becomes a singleton {@link SubRange}. */
    public Builder setIcmpCodes(int... icmpCodes) {
      _icmpCodes =
          Arrays.stream(icmpCodes)
              .mapToObj(SubRange::singleton)
              .collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural()));
      return this;
    }

    public Builder setIcmpTypes(Iterable<SubRange> icmpTypes) {
      _icmpTypes = ImmutableSortedSet.copyOf(icmpTypes);
      return this;
    }

    public Builder setIcmpTypes(SubRange... icmpTypes) {
      _icmpTypes = ImmutableSortedSet.copyOf(icmpTypes);
      return this;
    }

    /** Convenience overload: each int becomes a singleton {@link SubRange}. */
    public Builder setIcmpTypes(int... icmpTypes) {
      _icmpTypes =
          Arrays.stream(icmpTypes)
              .mapToObj(SubRange::singleton)
              .collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural()));
      return this;
    }

    public Builder setIpProtocols(IpProtocol...
        ipProtocols) {
      return setIpProtocols(Arrays.asList(ipProtocols));
    }

    public Builder setIpProtocols(Iterable<IpProtocol> ipProtocols) {
      _ipProtocols = ImmutableSortedSet.copyOf(ipProtocols);
      return this;
    }

    /** When true, the built header space matches the complement of its stated criteria. */
    public Builder setNegate(boolean negate) {
      _negate = negate;
      return this;
    }

    public Builder setNotDscps(Iterable<Integer> notDscps) {
      _notDscps = ImmutableSortedSet.copyOf(notDscps);
      return this;
    }

    public Builder setNotDstIps(Iterable<IpWildcard> notDstIps) {
      _notDstIps = IpWildcardSetIpSpace.builder().including(notDstIps).build();
      return this;
    }

    public Builder setNotDstIps(IpSpace notDstIps) {
      _notDstIps = notDstIps;
      return this;
    }

    public Builder setNotDstPorts(Iterable<SubRange> notDstPorts) {
      _notDstPorts = ImmutableSortedSet.copyOf(notDstPorts);
      return this;
    }

    public Builder setNotEcns(Iterable<Integer> notEcns) {
      _notEcns = ImmutableSortedSet.copyOf(notEcns);
      return this;
    }

    public Builder setNotFragmentOffsets(Iterable<SubRange> notFragmentOffsets) {
      _notFragmentOffsets = ImmutableSortedSet.copyOf(notFragmentOffsets);
      return this;
    }

    public Builder setNotIcmpCodes(Iterable<SubRange> notIcmpCodes) {
      _notIcmpCodes = ImmutableSortedSet.copyOf(notIcmpCodes);
      return this;
    }

    public Builder setNotIcmpTypes(Iterable<SubRange> notIcmpTypes) {
      _notIcmpTypes = ImmutableSortedSet.copyOf(notIcmpTypes);
      return this;
    }

    public Builder setNotIpProtocols(Iterable<IpProtocol> notIpProtocols) {
      _notIpProtocols = ImmutableSortedSet.copyOf(notIpProtocols);
      return this;
    }

    public Builder setNotPacketLengths(Iterable<SubRange> notPacketLengths) {
      _notPacketLengths = ImmutableSortedSet.copyOf(notPacketLengths);
      return this;
    }

    public Builder setNotSrcIps(Iterable<IpWildcard> notSrcIps) {
      _notSrcIps = IpWildcardSetIpSpace.builder().including(notSrcIps).build();
      return this;
    }

    public Builder setNotSrcIps(IpSpace notSrcIps) {
      _notSrcIps = notSrcIps;
      return this;
    }

    public Builder setNotSrcPorts(Iterable<SubRange> notSrcPorts) {
      _notSrcPorts = ImmutableSortedSet.copyOf(notSrcPorts);
      return this;
    }

    public Builder setNotSrcProtocols(Iterable<Protocol> notSrcProtocols) {
      _notSrcProtocols = ImmutableSortedSet.copyOf(notSrcProtocols);
      return this;
    }

    public Builder setPacketLengths(Iterable<SubRange> packetLengths) {
      _packetLengths = ImmutableSortedSet.copyOf(packetLengths);
      return this;
    }

    public Builder setSrcIps(Iterable<IpWildcard> srcIps) {
      _srcIps = IpWildcardSetIpSpace.builder().including(srcIps).build();
      return this;
    }

    public Builder setSrcIps(IpSpace srcIps) {
      _srcIps = srcIps;
      return this;
    }

    public Builder setSrcOrDstIps(Iterable<IpWildcard> srcOrDstIps) {
      _srcOrDstIps = IpWildcardSetIpSpace.builder().including(srcOrDstIps).build();
      return this;
    }

    public Builder setSrcOrDstIps(IpSpace srcOrDstIps) {
      _srcOrDstIps = srcOrDstIps;
      return this;
    }

    public Builder setSrcOrDstPorts(Iterable<SubRange> srcOrDstPorts) {
      _srcOrDstPorts = ImmutableSortedSet.copyOf(srcOrDstPorts);
      return this;
    }

    public Builder setSrcOrDstProtocols(Iterable<Protocol> srcOrDstProtocols) {
      _srcOrDstProtocols = ImmutableSortedSet.copyOf(srcOrDstProtocols);
      return this;
    }

    public Builder setSrcPorts(SubRange...
        srcPorts) {
      return setSrcPorts(Arrays.asList(srcPorts));
    }

    public Builder setSrcPorts(Iterable<SubRange> srcPorts) {
      _srcPorts = ImmutableSortedSet.copyOf(srcPorts);
      return this;
    }

    public Builder setSrcProtocols(Iterable<Protocol> srcProtocols) {
      _srcProtocols = ImmutableSortedSet.copyOf(srcProtocols);
      return this;
    }

    public Builder setTcpFlags(Iterable<TcpFlagsMatchConditions> tcpFlags) {
      _tcpFlags = ImmutableList.copyOf(tcpFlags);
      return this;
    }
  }

  // Total order over all 26 fields compared by equals(); collections are compared
  // lexicographically, nullable IpSpace fields with nulls first.
  private static final Comparator<HeaderSpace> COMPARATOR =
      Comparator.comparing(HeaderSpace::getDscps, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getDstIps, nullsFirst(naturalOrder()))
          .thenComparing(HeaderSpace::getDstPorts, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getEcns, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getFragmentOffsets, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getIcmpCodes, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getIcmpTypes, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getIpProtocols, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getNegate)
          .thenComparing(HeaderSpace::getNotDscps, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getNotDstIps, nullsFirst(naturalOrder()))
          .thenComparing(
              HeaderSpace::getNotDstPorts, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getNotEcns, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getNotFragmentOffsets, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getNotIcmpCodes, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getNotIcmpTypes, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getNotIpProtocols, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getNotPacketLengths, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getNotSrcIps, nullsFirst(naturalOrder()))
          .thenComparing(
              HeaderSpace::getNotSrcPorts, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(
              HeaderSpace::getPacketLengths, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getSrcIps, nullsFirst(naturalOrder()))
          .thenComparing(HeaderSpace::getSrcOrDstIps, nullsFirst(naturalOrder()))
          .thenComparing(
              HeaderSpace::getSrcOrDstPorts, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getSrcPorts, Comparators.lexicographical(Ordering.natural()))
          .thenComparing(HeaderSpace::getTcpFlags, Comparators.lexicographical(Ordering.natural()));

  // JSON property names.
  private static final String PROP_DSCPS = "dscps";
  private static final String PROP_DST_IPS = "dstIps";
  private static final String PROP_DST_PORTS = "dstPorts";
  private static final String PROP_ECNS = "ecns";
  private static final String PROP_FRAGMENT_OFFSETS = "fragmentOffsets";
  private static final String PROP_ICMP_CODES = "icmpCodes";
  private static final String PROP_ICMP_TYPES = "icmpTypes";
  private static final String PROP_IP_PROTOCOLS = "ipProtocols";
  private static final String PROP_NEGATE = "negate";
  private static final String PROP_NOT_DSCPS = "notDscps";
  private static final String PROP_NOT_DST_IPS = "notDstIps";
  private static final String PROP_NOT_DST_PORTS = "notDstPorts";
  private static final String PROP_NOT_ECNS = "notEcns";
  private static final String PROP_NOT_FRAGMENT_OFFSETS = "notFragmentOffsets";
  private static final String PROP_NOT_ICMP_CODES = "notIcmpCodes";
  private static final String PROP_NOT_ICMP_TYPES = "notIcmpTypes";
  private static final String PROP_NOT_IP_PROTOCOLS = "notIpProtocols";
  private static final String PROP_NOT_PACKET_LENGTHS = "notPacketLengths";
  private static final String PROP_NOT_SRC_IPS = "notSrcIps";
  private static final String PROP_NOT_SRC_PORTS =
      "notSrcPorts";
  private static final String PROP_PACKET_LENGTHS = "packetLengths";
  private static final String PROP_SRC_IPS = "srcIps";
  private static final String PROP_SRC_OR_DST_IPS = "srcOrDstIps";
  private static final String PROP_SRC_OR_DST_PORTS = "srcOrDstPorts";
  private static final String PROP_SRC_PORTS = "srcPorts";
  private static final String PROP_TCP_FLAGS_MATCH_CONDITIONS = "tcpFlagsMatchConditions";
  // Legacy JSON property, accepted and ignored on deserialization (see setStates below).
  private static final String PROP_DEPRECATED_STATES = "states";

  public static Builder builder() {
    return new Builder();
  }

  private SortedSet<Integer> _dscps;
  private IpSpace _dstIps;
  private SortedSet<SubRange> _dstPorts;
  private SortedSet<Integer> _ecns;
  private SortedSet<SubRange> _fragmentOffsets;
  private SortedSet<SubRange> _icmpCodes;
  private SortedSet<SubRange> _icmpTypes;
  private SortedSet<IpProtocol> _ipProtocols;
  private boolean _negate;
  private SortedSet<Integer> _notDscps;
  private IpSpace _notDstIps;
  private SortedSet<SubRange> _notDstPorts;
  private SortedSet<Integer> _notEcns;
  private SortedSet<SubRange> _notFragmentOffsets;
  private SortedSet<SubRange> _notIcmpCodes;
  private SortedSet<SubRange> _notIcmpTypes;
  private SortedSet<IpProtocol> _notIpProtocols;
  private SortedSet<SubRange> _notPacketLengths;
  private IpSpace _notSrcIps;
  private SortedSet<SubRange> _notSrcPorts;
  private SortedSet<SubRange> _packetLengths;
  private IpSpace _srcIps;
  private IpSpace _srcOrDstIps;
  private SortedSet<SubRange> _srcOrDstPorts;
  private SortedSet<SubRange> _srcPorts;
  private List<TcpFlagsMatchConditions> _tcpFlags;

  /** JSON/bean constructor: all collection constraints empty, all IpSpace constraints null. */
  public HeaderSpace() {
    _dscps = Collections.emptySortedSet();
    _dstPorts = Collections.emptySortedSet();
    _ecns = Collections.emptySortedSet();
    _fragmentOffsets = Collections.emptySortedSet();
    _icmpCodes = Collections.emptySortedSet();
    _icmpTypes = Collections.emptySortedSet();
    _ipProtocols = Collections.emptySortedSet();
    _packetLengths = Collections.emptySortedSet();
    _srcOrDstPorts = Collections.emptySortedSet();
    _srcPorts = Collections.emptySortedSet();
    _tcpFlags = Collections.emptyList();
    _notDscps = Collections.emptySortedSet();
    _notDstPorts = Collections.emptySortedSet();
    _notEcns = Collections.emptySortedSet();
    _notFragmentOffsets = Collections.emptySortedSet();
    _notIcmpCodes = Collections.emptySortedSet();
    _notIcmpTypes = Collections.emptySortedSet();
    _notIpProtocols = Collections.emptySortedSet();
    _notPacketLengths = Collections.emptySortedSet();
    _notSrcPorts = Collections.emptySortedSet();
  }

  /** Copies all fields from the builder, snapshotting collections into immutable copies. */
  private HeaderSpace(Builder builder) {
    _dscps = ImmutableSortedSet.copyOf(builder._dscps);
    _dstIps = builder._dstIps;
    _dstPorts = ImmutableSortedSet.copyOf(builder._dstPorts);
    _ecns = ImmutableSortedSet.copyOf(builder._ecns);
    _fragmentOffsets = ImmutableSortedSet.copyOf(builder._fragmentOffsets);
    _ipProtocols = ImmutableSortedSet.copyOf(builder._ipProtocols);
    _negate = builder._negate;
    _notDscps = ImmutableSortedSet.copyOf(builder._notDscps);
    _notDstIps = builder._notDstIps;
    _notDstPorts = ImmutableSortedSet.copyOf(builder._notDstPorts);
    _notEcns = ImmutableSortedSet.copyOf(builder._notEcns);
    _notFragmentOffsets = ImmutableSortedSet.copyOf(builder._notFragmentOffsets);
    _notIcmpCodes = ImmutableSortedSet.copyOf(builder._notIcmpCodes);
    _notIcmpTypes = ImmutableSortedSet.copyOf(builder._notIcmpTypes);
    _notIpProtocols = ImmutableSortedSet.copyOf(builder._notIpProtocols);
    _notPacketLengths = ImmutableSortedSet.copyOf(builder._notPacketLengths);
    _notSrcIps = builder._notSrcIps;
    _notSrcPorts = ImmutableSortedSet.copyOf(builder._notSrcPorts);
    _packetLengths = ImmutableSortedSet.copyOf(builder._packetLengths);
    _srcIps = builder._srcIps;
    _srcOrDstIps = builder._srcOrDstIps;
    _srcOrDstPorts = ImmutableSortedSet.copyOf(builder._srcOrDstPorts);
    _srcPorts = ImmutableSortedSet.copyOf(builder._srcPorts);
    _icmpTypes = ImmutableSortedSet.copyOf(builder._icmpTypes);
    _icmpCodes = ImmutableSortedSet.copyOf(builder._icmpCodes);
    _tcpFlags = ImmutableList.copyOf(builder._tcpFlags);
  }

  @Override
  public int compareTo(HeaderSpace o) {
    return
        COMPARATOR.compare(this, o);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    } else if (!(o instanceof HeaderSpace)) {
      return false;
    }
    HeaderSpace other = (HeaderSpace) o;
    // Field-by-field comparison; nullable IpSpace fields use Objects.equals.
    return _dscps.equals(other._dscps)
        && Objects.equals(_dstIps, other._dstIps)
        && _dstPorts.equals(other._dstPorts)
        && _ecns.equals(other._ecns)
        && _fragmentOffsets.equals(other._fragmentOffsets)
        && _icmpCodes.equals(other._icmpCodes)
        && _icmpTypes.equals(other._icmpTypes)
        && _ipProtocols.equals(other._ipProtocols)
        && _negate == other._negate
        && _notDscps.equals(other._notDscps)
        && Objects.equals(_notDstIps, other._notDstIps)
        && _notDstPorts.equals(other._notDstPorts)
        && _notEcns.equals(other._notEcns)
        && _notFragmentOffsets.equals(other._notFragmentOffsets)
        && _notIcmpCodes.equals(other._notIcmpCodes)
        && _notIcmpTypes.equals(other._notIcmpTypes)
        && _notIpProtocols.equals(other._notIpProtocols)
        && _notPacketLengths.equals(other._notPacketLengths)
        && Objects.equals(_notSrcIps, other._notSrcIps)
        && _notSrcPorts.equals(other._notSrcPorts)
        && _packetLengths.equals(other._packetLengths)
        && Objects.equals(_srcIps, other._srcIps)
        && Objects.equals(_srcOrDstIps, other._srcOrDstIps)
        && _srcOrDstPorts.equals(other._srcOrDstPorts)
        && _srcPorts.equals(other._srcPorts)
        && _tcpFlags.equals(other._tcpFlags);
  }

  /** A set of acceptable DSCP values for a packet. */
  @JsonProperty(PROP_DSCPS)
  public SortedSet<Integer> getDscps() {
    return _dscps;
  }

  /**
   * A space of acceptable destination IP addresses for a packet.
   *
   * <p>The empty set of dstIps is interpreted as no constraint, or all IPs
   */
  @JsonProperty(PROP_DST_IPS)
  public IpSpace getDstIps() {
    return _dstIps;
  }

  /** A set of acceptable destination port ranges for a TCP/UDP packet. */
  @JsonProperty(PROP_DST_PORTS)
  public SortedSet<SubRange> getDstPorts() {
    return _dstPorts;
  }

  /** A set of acceptable ECN values for a packet. */
  @JsonProperty(PROP_ECNS)
  public SortedSet<Integer> getEcns() {
    return _ecns;
  }

  /** A set of acceptable fragment offsets for a UDP packet. */
  @JsonProperty(PROP_FRAGMENT_OFFSETS)
  public SortedSet<SubRange> getFragmentOffsets() {
    return _fragmentOffsets;
  }

  /** A set of acceptable ICMP code ranges for an ICMP packet. */
  @JsonProperty(PROP_ICMP_CODES)
  public SortedSet<SubRange> getIcmpCodes() {
    return _icmpCodes;
  }

  /** A set of acceptable ICMP type ranges for an ICMP packet. */
  @JsonProperty(PROP_ICMP_TYPES)
  public SortedSet<SubRange> getIcmpTypes() {
    return _icmpTypes;
  }

  /** A set of acceptable IP protocols for a packet. */
  @JsonProperty(PROP_IP_PROTOCOLS)
  public SortedSet<IpProtocol> getIpProtocols() {
    return _ipProtocols;
  }

  /** Determines whether to match the complement of the stated criteria of this header space. */
  @JsonProperty(PROP_NEGATE)
  public boolean getNegate() {
    return _negate;
  }

  /** A set of unacceptable DSCP values for a packet. */
  @JsonProperty(PROP_NOT_DSCPS)
  public SortedSet<Integer> getNotDscps() {
    return _notDscps;
  }

  /** A space of unacceptable destination IP addresses for a packet. */
  @JsonProperty(PROP_NOT_DST_IPS)
  public IpSpace getNotDstIps() {
    return _notDstIps;
  }

  /** A set of unacceptable destination port ranges for a TCP/UDP packet. */
  @JsonProperty(PROP_NOT_DST_PORTS)
  public SortedSet<SubRange> getNotDstPorts() {
    return _notDstPorts;
  }

  /** A set of unacceptable ECN values for a packet. */
  @JsonProperty(PROP_NOT_ECNS)
  public SortedSet<Integer> getNotEcns() {
    return _notEcns;
  }

  /** A set of unacceptable fragment offsets for a UDP packet. */
  @JsonProperty(PROP_NOT_FRAGMENT_OFFSETS)
  public SortedSet<SubRange> getNotFragmentOffsets() {
    return _notFragmentOffsets;
  }

  /** A set of unacceptable ICMP code ranges for an ICMP packet. */
  @JsonProperty(PROP_NOT_ICMP_CODES)
  public SortedSet<SubRange> getNotIcmpCodes() {
    return _notIcmpCodes;
  }

  /** A set of unacceptable ICMP type ranges for an ICMP packet. */
  @JsonProperty(PROP_NOT_ICMP_TYPES)
  public SortedSet<SubRange> getNotIcmpTypes() {
    return _notIcmpTypes;
  }

  /** A set of unacceptable IP protocols for a packet. */
  @JsonProperty(PROP_NOT_IP_PROTOCOLS)
  public SortedSet<IpProtocol> getNotIpProtocols() {
    return _notIpProtocols;
  }

  @JsonProperty(PROP_NOT_PACKET_LENGTHS)
  public SortedSet<SubRange> getNotPacketLengths() {
    return _notPacketLengths;
  }

  /** A space of unacceptable source IP addresses for a packet. */
  @JsonProperty(PROP_NOT_SRC_IPS)
  public IpSpace getNotSrcIps() {
    return _notSrcIps;
  }

  /** A set of unacceptable source port ranges for a TCP/UDP packet. */
  @JsonProperty(PROP_NOT_SRC_PORTS)
  public SortedSet<SubRange> getNotSrcPorts() {
    return _notSrcPorts;
  }

  @JsonProperty(PROP_PACKET_LENGTHS)
  public SortedSet<SubRange> getPacketLengths() {
    return _packetLengths;
  }

  /** A space of acceptable source IP addresses for a packet. */
  @JsonProperty(PROP_SRC_IPS)
  public IpSpace getSrcIps() {
    return _srcIps;
  }

  /**
   * A space of IP addresses within which either the source or the destination IP of a packet must
   * fall for acceptance.
   */
  @JsonProperty(PROP_SRC_OR_DST_IPS)
  public IpSpace getSrcOrDstIps() {
    return _srcOrDstIps;
  }

  /**
   * A set of ranges within which either the source or the destination port of a TCP/UDP packet must
   * fall for acceptance.
   */
  @JsonProperty(PROP_SRC_OR_DST_PORTS)
  public SortedSet<SubRange> getSrcOrDstPorts() {
    return _srcOrDstPorts;
  }

  /** A set of acceptable source port ranges for a TCP/UDP packet. */
  @JsonProperty(PROP_SRC_PORTS)
  public SortedSet<SubRange> getSrcPorts() {
    return _srcPorts;
  }

  /** A set of acceptable TCP flag bitmasks for a TCP packet to match.
*/
@JsonProperty(PROP_TCP_FLAGS_MATCH_CONDITIONS)
public List<TcpFlagsMatchConditions> getTcpFlags() {
  return _tcpFlags;
}

@Override
public int hashCode() {
  // Covers every match-condition field; must stay in sync with equals().
  return Objects.hash(
      _dscps,
      _dstIps,
      _dstPorts,
      _ecns,
      _fragmentOffsets,
      _icmpCodes,
      _icmpTypes,
      _ipProtocols,
      _negate,
      _notDscps,
      _notDstIps,
      _notDstPorts,
      _notEcns,
      _notFragmentOffsets,
      _notIcmpCodes,
      _notIcmpTypes,
      _notIpProtocols,
      _notPacketLengths,
      _notSrcIps,
      _notSrcPorts,
      _packetLengths,
      _srcIps,
      _srcOrDstIps,
      _srcOrDstPorts,
      _srcPorts,
      _tcpFlags);
}

/**
 * Returns {@code true} iff {@code flow} satisfies every constraint of this header space.
 *
 * <p>Empty collection fields and {@code null} IP-space fields are unconstrained. NOTE(review):
 * {@code _negate} is not applied here — presumably the caller honors it; confirm at call sites.
 *
 * <p>Bug fix: the {@code _notIcmpCodes} and {@code _notIcmpTypes} checks previously tested
 * {@code flow.getFragmentOffset()} (copy-paste error). They now test the flow's ICMP code/type,
 * with the same null guard as the positive ICMP checks (ICMP fields are null for non-ICMP flows).
 *
 * @param flow the flow to test
 * @param namedIpSpaces resolution context for named {@link IpSpace} references
 */
public boolean matches(Flow flow, Map<String, IpSpace> namedIpSpaces) {
  if (!_dscps.isEmpty() && !_dscps.contains(flow.getDscp())) {
    return false;
  }
  if (_notDscps.contains(flow.getDscp())) {
    return false;
  }
  if (_dstIps != null && !_dstIps.containsIp(flow.getDstIp(), namedIpSpaces)) {
    return false;
  }
  if (_notDstIps != null && _notDstIps.containsIp(flow.getDstIp(), namedIpSpaces)) {
    return false;
  }
  if (!_dstPorts.isEmpty()
      && _dstPorts.stream().noneMatch(sr -> sr.includes(flow.getDstPort()))) {
    return false;
  }
  if (!_notDstPorts.isEmpty()
      && _notDstPorts.stream().anyMatch(sr -> sr.includes(flow.getDstPort()))) {
    return false;
  }
  if (!_ecns.isEmpty() && !_ecns.contains(flow.getEcn())) {
    return false;
  }
  if (_notEcns.contains(flow.getEcn())) {
    return false;
  }
  if (!_fragmentOffsets.isEmpty()
      && _fragmentOffsets.stream().noneMatch(sr -> sr.includes(flow.getFragmentOffset()))) {
    return false;
  }
  if (!_notFragmentOffsets.isEmpty()
      && _notFragmentOffsets.stream().anyMatch(sr -> sr.includes(flow.getFragmentOffset()))) {
    return false;
  }
  if (!_icmpCodes.isEmpty()
      && flow.getIcmpCode() != null
      && _icmpCodes.stream().noneMatch(sr -> sr.includes(flow.getIcmpCode()))) {
    return false;
  }
  // FIXED: was sr.includes(flow.getFragmentOffset())
  if (!_notIcmpCodes.isEmpty()
      && flow.getIcmpCode() != null
      && _notIcmpCodes.stream().anyMatch(sr -> sr.includes(flow.getIcmpCode()))) {
    return false;
  }
  if (!_icmpTypes.isEmpty()
      && flow.getIcmpType() != null
      && _icmpTypes.stream().noneMatch(sr -> sr.includes(flow.getIcmpType()))) {
    return false;
  }
  // FIXED: was sr.includes(flow.getFragmentOffset())
  if (!_notIcmpTypes.isEmpty()
      && flow.getIcmpType() != null
      && _notIcmpTypes.stream().anyMatch(sr -> sr.includes(flow.getIcmpType()))) {
    return false;
  }
  if (!_ipProtocols.isEmpty() && !_ipProtocols.contains(flow.getIpProtocol())) {
    return false;
  }
  if (_notIpProtocols.contains(flow.getIpProtocol())) {
    return false;
  }
  if (!_packetLengths.isEmpty()
      && _packetLengths.stream().noneMatch(sr -> sr.includes(flow.getPacketLength()))) {
    return false;
  }
  if (!_notPacketLengths.isEmpty()
      && _notPacketLengths.stream().anyMatch(sr -> sr.includes(flow.getPacketLength()))) {
    return false;
  }
  if (_srcOrDstIps != null
      && !(_srcOrDstIps.containsIp(flow.getSrcIp(), namedIpSpaces)
          || _srcOrDstIps.containsIp(flow.getDstIp(), namedIpSpaces))) {
    return false;
  }
  if (!_srcOrDstPorts.isEmpty()
      && _srcOrDstPorts.stream()
          .noneMatch(sr -> sr.includes(flow.getSrcPort()) || sr.includes(flow.getDstPort()))) {
    return false;
  }
  if (_srcIps != null && !_srcIps.containsIp(flow.getSrcIp(), namedIpSpaces)) {
    return false;
  }
  if (_notSrcIps != null && _notSrcIps.containsIp(flow.getSrcIp(), namedIpSpaces)) {
    return false;
  }
  if (!_srcPorts.isEmpty()
      && _srcPorts.stream().noneMatch(sr -> sr.includes(flow.getSrcPort()))) {
    return false;
  }
  if (!_notSrcPorts.isEmpty()
      && _notSrcPorts.stream().anyMatch(sr -> sr.includes(flow.getSrcPort()))) {
    return false;
  }
  if (!_tcpFlags.isEmpty() && _tcpFlags.stream().noneMatch(tcpFlags -> tcpFlags.match(flow))) {
    return false;
  }
  return true;
}

// --- JSON setters. Each snapshots its input into an immutable collection, so later mutation of
// the caller's iterable cannot affect this object. IpSpace setters keep the reference as-is.

@JsonProperty(PROP_DSCPS)
public void setDscps(Iterable<Integer> dscps) {
  _dscps = ImmutableSortedSet.copyOf(dscps);
}

@JsonProperty(PROP_DST_IPS)
public void setDstIps(IpSpace dstIps) {
  _dstIps = dstIps;
}

public void setDstIps(Iterable<IpWildcard> dstIps) {
  _dstIps = IpWildcardSetIpSpace.builder().including(dstIps).build();
}

@JsonProperty(PROP_DST_PORTS)
public void setDstPorts(Iterable<SubRange> dstPorts) {
  _dstPorts = ImmutableSortedSet.copyOf(dstPorts);
}

@JsonProperty(PROP_ECNS)
public void setEcns(Iterable<Integer> ecns) {
  _ecns = ImmutableSortedSet.copyOf(ecns);
}

@JsonProperty(PROP_FRAGMENT_OFFSETS)
public void setFragmentOffsets(Iterable<SubRange> fragmentOffsets) {
  _fragmentOffsets = ImmutableSortedSet.copyOf(fragmentOffsets);
}

@JsonProperty(PROP_ICMP_CODES)
public void setIcmpCodes(Iterable<SubRange> icmpCodes) {
  _icmpCodes = ImmutableSortedSet.copyOf(icmpCodes);
}

@JsonProperty(PROP_ICMP_TYPES)
public void setIcmpTypes(Iterable<SubRange> icmpTypes) {
  _icmpTypes = ImmutableSortedSet.copyOf(icmpTypes);
}

@JsonProperty(PROP_IP_PROTOCOLS)
public void setIpProtocols(Iterable<IpProtocol> ipProtocols) {
  _ipProtocols = ImmutableSortedSet.copyOf(ipProtocols);
}

@JsonProperty(PROP_NEGATE)
public void setNegate(boolean negate) {
  _negate = negate;
}

@JsonProperty(PROP_NOT_DSCPS)
public void setNotDscps(Iterable<Integer> notDscps) {
  _notDscps = ImmutableSortedSet.copyOf(notDscps);
}

@JsonProperty(PROP_NOT_DST_IPS)
public void setNotDstIps(IpSpace notDstIps) {
  _notDstIps = notDstIps;
}

public void setNotDstIps(Iterable<IpWildcard> notDstIps) {
  _notDstIps = IpWildcardSetIpSpace.builder().including(notDstIps).build();
}

@JsonProperty(PROP_NOT_DST_PORTS)
public void setNotDstPorts(Iterable<SubRange> notDstPorts) {
  _notDstPorts = ImmutableSortedSet.copyOf(notDstPorts);
}

@JsonProperty(PROP_NOT_ECNS)
public void setNotEcns(Iterable<Integer> notEcns) {
  _notEcns = ImmutableSortedSet.copyOf(notEcns);
}

@JsonProperty(PROP_NOT_FRAGMENT_OFFSETS)
public void setNotFragmentOffsets(Iterable<SubRange> notFragmentOffsets) {
  _notFragmentOffsets = ImmutableSortedSet.copyOf(notFragmentOffsets);
}

@JsonProperty(PROP_NOT_ICMP_CODES)
public void setNotIcmpCodes(Iterable<SubRange> notIcmpCodes) {
  _notIcmpCodes = ImmutableSortedSet.copyOf(notIcmpCodes);
}

@JsonProperty(PROP_NOT_ICMP_TYPES)
public void setNotIcmpTypes(Iterable<SubRange> notIcmpTypes) {
  _notIcmpTypes = ImmutableSortedSet.copyOf(notIcmpTypes);
}

@JsonProperty(PROP_NOT_IP_PROTOCOLS)
public void setNotIpProtocols(Iterable<IpProtocol> notIpProtocols) {
  _notIpProtocols = ImmutableSortedSet.copyOf(notIpProtocols);
}

@JsonProperty(PROP_NOT_PACKET_LENGTHS)
public void setNotPacketLengths(Iterable<SubRange> notPacketLengths) {
  _notPacketLengths = ImmutableSortedSet.copyOf(notPacketLengths);
}

@JsonProperty(PROP_NOT_SRC_IPS)
public void setNotSrcIps(IpSpace notSrcIps) {
  _notSrcIps = notSrcIps;
}

public void setNotSrcIps(Iterable<IpWildcard> notSrcIps) {
  _notSrcIps = IpWildcardSetIpSpace.builder().including(notSrcIps).build();
}

@JsonProperty(PROP_NOT_SRC_PORTS)
public void setNotSrcPorts(Iterable<SubRange> notSrcPorts) {
  _notSrcPorts = ImmutableSortedSet.copyOf(notSrcPorts);
}

@JsonProperty(PROP_PACKET_LENGTHS)
public void setPacketLengths(Iterable<SubRange> packetLengths) {
  _packetLengths = ImmutableSortedSet.copyOf(packetLengths);
}

@JsonProperty(PROP_SRC_IPS)
public void setSrcIps(IpSpace srcIps) {
  _srcIps = srcIps;
}

public void setSrcIps(Iterable<IpWildcard> srcIps) {
  _srcIps = IpWildcardSetIpSpace.builder().including(srcIps).build();
}

@JsonProperty(PROP_SRC_OR_DST_IPS)
public void setSrcOrDstIps(IpSpace srcOrDstIps) {
  _srcOrDstIps = srcOrDstIps;
}

public void setSrcOrDstIps(Iterable<IpWildcard> srcOrDstIps) {
  _srcOrDstIps = IpWildcardSetIpSpace.builder().including(srcOrDstIps).build();
}

@JsonProperty(PROP_SRC_OR_DST_PORTS)
public void setSrcOrDstPorts(Iterable<SubRange> srcOrDstPorts) {
  _srcOrDstPorts = ImmutableSortedSet.copyOf(srcOrDstPorts);
}

@JsonProperty(PROP_SRC_PORTS)
public void setSrcPorts(Iterable<SubRange> srcPorts) {
  _srcPorts = ImmutableSortedSet.copyOf(srcPorts);
}

/** Swallows the legacy "states" JSON property so old serialized data still deserializes. */
@Deprecated
@JsonProperty(PROP_DEPRECATED_STATES)
private void setStates(Object ignored) {}

@JsonProperty(PROP_TCP_FLAGS_MATCH_CONDITIONS)
public void setTcpFlags(Iterable<TcpFlagsMatchConditions> tcpFlags) {
  _tcpFlags = ImmutableList.copyOf(tcpFlags);
}

/** Returns a {@link Builder} pre-populated with every field of this header space. */
public Builder toBuilder() {
  return builder()
      .setDscps(_dscps)
      .setDstIps(_dstIps)
      .setDstPorts(_dstPorts)
      .setEcns(_ecns)
      .setFragmentOffsets(_fragmentOffsets)
      .setIcmpCodes(_icmpCodes)
      .setIcmpTypes(_icmpTypes)
      .setIpProtocols(_ipProtocols)
      .setNegate(_negate)
      .setNotDscps(_notDscps)
      .setNotDstIps(_notDstIps)
      .setNotDstPorts(_notDstPorts)
      .setNotEcns(_notEcns)
      .setNotFragmentOffsets(_notFragmentOffsets)
      .setNotIcmpCodes(_notIcmpCodes)
      .setNotIcmpTypes(_notIcmpTypes)
      .setNotIpProtocols(_notIpProtocols)
      .setNotPacketLengths(_notPacketLengths)
      .setNotSrcIps(_notSrcIps)
      .setNotSrcPorts(_notSrcPorts)
      .setPacketLengths(_packetLengths)
      .setSrcIps(_srcIps)
      .setSrcOrDstIps(_srcOrDstIps)
      .setSrcOrDstPorts(_srcOrDstPorts)
      .setSrcPorts(_srcPorts)
      .setTcpFlags(_tcpFlags);
}

@Override
public String toString() {
  // Empty collections and false _negate are rendered as null and thus omitted.
  return MoreObjects.toStringHelper(getClass())
      .omitNullValues()
      .add(PROP_DSCPS, nullIfEmpty(_dscps))
      .add(PROP_DST_IPS, _dstIps)
      .add(PROP_DST_PORTS, nullIfEmpty(_dstPorts))
      .add(PROP_ECNS, nullIfEmpty(_ecns))
      .add(PROP_FRAGMENT_OFFSETS, nullIfEmpty(_fragmentOffsets))
      .add(PROP_ICMP_CODES, nullIfEmpty(_icmpCodes))
      .add(PROP_ICMP_TYPES, nullIfEmpty(_icmpTypes))
      .add(PROP_IP_PROTOCOLS, nullIfEmpty(_ipProtocols))
      .add(PROP_NEGATE, _negate ? _negate : null)
      .add(PROP_NOT_DSCPS, nullIfEmpty(_notDscps))
      .add(PROP_NOT_DST_IPS, _notDstIps)
      .add(PROP_NOT_DST_PORTS, nullIfEmpty(_notDstPorts))
      .add(PROP_NOT_ECNS, nullIfEmpty(_notEcns))
      .add(PROP_NOT_FRAGMENT_OFFSETS, nullIfEmpty(_notFragmentOffsets))
      .add(PROP_NOT_ICMP_CODES, nullIfEmpty(_notIcmpCodes))
      .add(PROP_NOT_ICMP_TYPES, nullIfEmpty(_notIcmpTypes))
      .add(PROP_NOT_IP_PROTOCOLS, nullIfEmpty(_notIpProtocols))
      .add(PROP_NOT_PACKET_LENGTHS, nullIfEmpty(_notPacketLengths))
      .add(PROP_NOT_SRC_IPS, _notSrcIps)
      .add(PROP_NOT_SRC_PORTS, nullIfEmpty(_notSrcPorts))
      .add(PROP_PACKET_LENGTHS, nullIfEmpty(_packetLengths))
      .add(PROP_SRC_IPS, _srcIps)
      .add(PROP_SRC_OR_DST_IPS, _srcOrDstIps)
      .add(PROP_SRC_OR_DST_PORTS, nullIfEmpty(_srcOrDstPorts))
      .add(PROP_SRC_PORTS, nullIfEmpty(_srcPorts))
      .add(PROP_TCP_FLAGS_MATCH_CONDITIONS, nullIfEmpty(_tcpFlags))
      .toString();
}

/**
 * Returns {@code true} iff this header space places no constraint on any field, i.e. it matches
 * every flow. IP-space fields are unconstrained only in their canonical universal/empty forms.
 */
public final boolean unrestricted() {
  return _dscps.isEmpty()
      && _notDscps.isEmpty()
      && _dstIps instanceof UniverseIpSpace
      && _notDstIps instanceof EmptyIpSpace
      && _dstPorts.isEmpty()
      && _notDstPorts.isEmpty()
      && _ecns.isEmpty()
      && _notEcns.isEmpty()
      && _fragmentOffsets.isEmpty()
      && _notFragmentOffsets.isEmpty()
      && _icmpCodes.isEmpty()
      && _notIcmpCodes.isEmpty()
      && _icmpTypes.isEmpty()
      && _notIcmpTypes.isEmpty()
      && _ipProtocols.isEmpty()
      && _notIpProtocols.isEmpty()
      && _packetLengths.isEmpty()
      && _notPacketLengths.isEmpty()
      && _srcIps instanceof UniverseIpSpace
      && _notSrcIps instanceof EmptyIpSpace
      && _srcOrDstIps instanceof UniverseIpSpace
      && _srcOrDstPorts.isEmpty()
      && _srcPorts.isEmpty()
      && _notSrcPorts.isEmpty()
      && _tcpFlags.isEmpty();
}
}
package org.motechproject.nms.rch.service.impl; import org.apache.axis.Message; import org.apache.axis.MessageContext; import org.apache.axis.description.TypeDesc; import org.apache.axis.encoding.SerializationContext; import org.apache.axis.encoding.ser.BeanSerializer; import org.apache.axis.server.AxisServer; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.StopWatch; import org.datanucleus.store.rdbms.query.ForwardQueryResult; import org.joda.time.DateTime; import org.joda.time.LocalDate; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.motechproject.alerts.contract.AlertService; import org.motechproject.alerts.domain.AlertStatus; import org.motechproject.alerts.domain.AlertType; import org.motechproject.event.MotechEvent; import org.motechproject.event.listener.EventRelay; import org.motechproject.event.listener.annotations.MotechListener; import org.motechproject.mds.query.QueryParams; import org.motechproject.mds.query.SqlQueryExecution; import org.motechproject.mds.util.Order; import org.motechproject.metrics.service.Timer; import org.motechproject.nms.flw.domain.FrontLineWorker; import org.motechproject.nms.flw.domain.FrontLineWorkerStatus; import org.motechproject.nms.flw.exception.FlwExistingRecordException; import org.motechproject.nms.flw.exception.FlwImportException; import org.motechproject.nms.flw.service.FrontLineWorkerService; import org.motechproject.nms.kilkari.contract.RchAnmAshaRecord; import org.motechproject.nms.kilkari.contract.RchChildRecord; import org.motechproject.nms.kilkari.contract.RchDistrictRecord; import org.motechproject.nms.kilkari.contract.RchHealthBlockRecord; import org.motechproject.nms.kilkari.contract.RchHealthFacilityRecord; import org.motechproject.nms.kilkari.contract.RchHealthSubFacilityRecord; import org.motechproject.nms.kilkari.contract.RchMotherRecord; 
import org.motechproject.nms.kilkari.contract.RchTalukaHealthBlockRecord; import org.motechproject.nms.kilkari.contract.RchTalukaRecord; import org.motechproject.nms.kilkari.contract.RchVillageHealthSubFacilityRecord; import org.motechproject.nms.kilkari.contract.RchVillageRecord; import org.motechproject.nms.kilkari.domain.RejectionReasons; import org.motechproject.nms.kilkari.domain.SubscriptionOrigin; import org.motechproject.nms.kilkari.domain.MctsMother; import org.motechproject.nms.kilkari.domain.MctsChild; import org.motechproject.nms.kilkari.domain.SubscriptionPackType; import org.motechproject.nms.kilkari.domain.ThreadProcessorObject; import org.motechproject.nms.kilkari.service.MctsBeneficiaryImportReaderService; import org.motechproject.nms.kilkari.service.ChildCsvThreadProcessor; import org.motechproject.nms.kilkari.service.MotherCsvThreadProcessor; import org.motechproject.nms.kilkari.utils.FlwConstants; import org.motechproject.nms.flwUpdate.service.FrontLineWorkerImportService; import org.motechproject.nms.kilkari.service.MctsBeneficiaryImportService; import org.motechproject.nms.kilkari.service.MctsBeneficiaryValueProcessor; import org.motechproject.nms.kilkari.utils.KilkariConstants; import org.motechproject.nms.rch.contract.RchAnmAshaDataSet; import org.motechproject.nms.rch.contract.RchChildrenDataSet; import org.motechproject.nms.rch.contract.RchDistrictDataSet; import org.motechproject.nms.rch.contract.RchHealthBlockDataSet; import org.motechproject.nms.rch.contract.RchHealthFacilityDataSet; import org.motechproject.nms.rch.contract.RchHealthSubFacilityDataSet; import org.motechproject.nms.rch.contract.RchMothersDataSet; import org.motechproject.nms.rch.contract.RchTalukaDataSet; import org.motechproject.nms.rch.contract.RchTalukaHealthBlockDataSet; import org.motechproject.nms.rch.contract.RchVillageDataSet; import org.motechproject.nms.rch.contract.RchVillageHealthSubFacilityDataSet; import org.motechproject.nms.rch.domain.RchImportAudit; 
import org.motechproject.nms.rch.domain.RchImportFacilitator; import org.motechproject.nms.rch.domain.RchImportFailRecord; import org.motechproject.nms.rch.domain.RchUserType; import org.motechproject.nms.rch.exception.ExecutionException; import org.motechproject.nms.rch.exception.RchFileManipulationException; import org.motechproject.nms.rch.exception.RchInvalidResponseStructureException; import org.motechproject.nms.rch.exception.RchWebServiceException; import org.motechproject.nms.rch.repository.RchImportAuditDataService; import org.motechproject.nms.rch.repository.RchImportFacilitatorDataService; import org.motechproject.nms.rch.repository.RchImportFailRecordDataService; import org.motechproject.nms.rch.service.RchImportFacilitatorService; import org.motechproject.nms.rch.service.RchWebServiceFacade; import org.motechproject.nms.rch.soap.DS_DataResponseDS_DataResult; import org.motechproject.nms.rch.soap.Irchwebservices; import org.motechproject.nms.rch.soap.RchwebservicesLocator; import org.motechproject.nms.rch.utils.Constants; import org.motechproject.nms.rch.utils.ExecutionHelper; import org.motechproject.nms.rch.utils.MarshallUtils; import org.motechproject.nms.region.domain.HealthBlock; import org.motechproject.nms.region.domain.HealthFacility; import org.motechproject.nms.region.domain.HealthSubFacility; import org.motechproject.nms.region.domain.LocationEnum; import org.motechproject.nms.region.domain.LocationFinder; import org.motechproject.nms.region.domain.State; import org.motechproject.nms.region.domain.Taluka; import org.motechproject.nms.region.domain.Village; import org.motechproject.nms.region.exception.InvalidLocationException; import org.motechproject.nms.region.repository.StateDataService; import org.motechproject.nms.region.service.LocationService; import org.motechproject.nms.rejectionhandler.domain.ChildImportRejection; import org.motechproject.nms.rejectionhandler.domain.MotherImportRejection; import 
org.motechproject.nms.rejectionhandler.service.FlwRejectionService; import org.motechproject.server.config.SettingsFacade; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.mock.web.MockMultipartFile; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.multipart.MultipartFile; import org.supercsv.cellprocessor.ift.CellProcessor; import org.xml.sax.helpers.AttributesImpl; import javax.jdo.Query; import javax.xml.bind.JAXBException; import javax.xml.namespace.QName; import javax.xml.rpc.ServiceException; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.FileReader; import java.io.BufferedReader; import java.io.IOException; import java.io.StringWriter; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.reflect.Method; import java.net.URL; import java.rmi.RemoteException; import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.ArrayList; import java.util.Date; import java.util.Objects; import java.text.SimpleDateFormat; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static org.motechproject.nms.kilkari.utils.ObjectListCleaner.cleanRchMotherRecords; import static org.motechproject.nms.kilkari.utils.ObjectListCleaner.cleanRchChildRecords; import static org.motechproject.nms.kilkari.utils.ObjectListCleaner.cleanRchFlwRecords; import static 
org.motechproject.nms.kilkari.utils.RejectedObjectConverter.childRejectionRch;
import static org.motechproject.nms.kilkari.utils.RejectedObjectConverter.convertMapToRchChild;
import static org.motechproject.nms.kilkari.utils.RejectedObjectConverter.convertMapToRchMother;
import static org.motechproject.nms.kilkari.utils.RejectedObjectConverter.motherRejectionRch;
import static org.motechproject.nms.kilkari.utils.RejectedObjectConverter.flwRejectionRch;

/**
 * SOAP facade for the RCH (Reproductive and Child Health) data-import pipeline.
 *
 * <p>The methods visible here fetch per-state data sets (mother/district/taluka/village) from the
 * remote RCH web service, write the response to a file, copy it to a remote directory, and later
 * read the copied files back, unmarshal them, and push location records into the database in
 * parts. Helpers such as {@code getService}, {@code generateResponseFile},
 * {@code scpResponseToRemote}, {@code scpResponseToLocal}, {@code readResponsesFromXml},
 * {@code getStateIds} and the {@code toMap*} converters are defined elsewhere in this class
 * (outside this view).
 */
@Service("rchWebServiceFacade")
public class RchWebServiceFacadeImpl implements RchWebServiceFacade {

    // Date format used for both SOAP request parameters and the TIME_FORMATTER below.
    private static final String DATE_FORMAT = "dd-MM-yyyy";
    // Settings keys for local/remote response directories.
    private static final String LOCAL_RESPONSE_DIR = "rch.local_response_dir";
    private static final String REMOTE_RESPONSE_DIR = "rch.remote_response_dir";
    private static final String REMOTE_RESPONSE_DIR_CSV = "rch.remote_response_dir_csv";
    private static final String REMOTE_RESPONSE_DIR_XML = "rch.remote_response_dir_xml";
    private static final String LOC_UPDATE_DIR_RCH = "rch.loc_update_dir";
    private static final String REMOTE_RESPONSE_DIR_LOCATION = "rch.remote_response_dir_locations";
    private static final String NEXT_LINE = "\r\n";
    private static final String TAB = "\t";
    // Location records are pushed to the DB in parts of this size.
    private static final Integer LOCATION_PART_SIZE = 5000;
    // Marker substring used to decide whether an XML response contains any records.
    private static final String RECORDS = "Records";
    private static final String QUOTATION = "'";
    private static final String SQL_QUERY_LOG = "SQL QUERY: {}";
    private static final String FROM_DATE_LOG = "fromdate {}";
    private static final String SCP_ERROR = "error copying file to remote server.";
    private static final DateTimeFormatter TIME_FORMATTER = DateTimeFormat.forPattern("dd-MM-yyyy");
    private static final String SCP_TIMEOUT_SETTING = "rch.scp_timeout";
    private static final Long SCP_TIME_OUT = 60000L; // default scp timeout, ms — TODO confirm unit
    private static final String RCH_WEB_SERVICE = "RCH Web Service";
    private static final String BULK_REJECTION_ERROR_MESSAGE = "Error while bulk updating rejection records";
    private static final double THOUSAND = 1000d;

    // --- Injected collaborators ---
    @Autowired
    @Qualifier("rchSettings")
    private SettingsFacade settingsFacade;

    @Autowired
    @Qualifier("rchServiceLocator")
    private RchwebservicesLocator rchServiceLocator;

    private static final Logger LOGGER = LoggerFactory.getLogger(RchWebServiceFacadeImpl.class);

    @Autowired
    private RchImportAuditDataService rchImportAuditDataService;

    @Autowired
    private RchImportFacilitatorDataService rchImportFacilitatorDataService;

    @Autowired
    private RchImportFacilitatorService rchImportFacilitatorService;

    @Autowired
    private StateDataService stateDataService;

    @Autowired
    private FrontLineWorkerImportService frontLineWorkerImportService;

    @Autowired
    private RchImportFailRecordDataService rchImportFailRecordDataService;

    @Autowired
    private AlertService alertService;

    @Autowired
    private MctsBeneficiaryValueProcessor mctsBeneficiaryValueProcessor;

    @Autowired
    private MctsBeneficiaryImportService mctsBeneficiaryImportService;

    @Autowired
    private MctsBeneficiaryImportReaderService mctsBeneficiaryImportReaderService;

    @Autowired
    private FlwRejectionService flwRejectionService;

    @Autowired
    private FrontLineWorkerService frontLineWorkerService;

    @Autowired
    private LocationService locationService;

    @Autowired
    private EventRelay eventRelay;

    /**
     * Fetches mother data for one state from the RCH SOAP endpoint, writes the response to a
     * file, copies it to the remote directory and records an import-facilitator audit row.
     * Returns true only when the whole fetch+write+copy+audit sequence succeeded.
     */
    @Override
    public boolean getMothersData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_MOTHER_USER), settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH Mother Data.", e);
        }
        LOGGER.debug("writing RCH mother response to file");
        File responseFile = generateResponseFile(result, RchUserType.MOTHER, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH mother response successfully written to file. Copying to remote directory.");
            try {
                scpResponseToRemote(responseFile.getName());
                LOGGER.info("RCH mother response file successfully copied to remote server");
                RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.MOTHER, LocalDate.now());
                rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator);
                status = true;
            } catch (ExecutionException e) {
                // best-effort: scp failure is logged, method returns false
                LOGGER.error(SCP_ERROR, e);
            } catch (RchFileManipulationException e) {
                LOGGER.error("invalid file name", e);
            }
        } else {
            LOGGER.error("Error writing response to file.");
        }
        return status;
    }

    /**
     * Same fetch/write/copy/audit sequence as {@link #getMothersData}, but for district
     * location data ({@code RCH_LOCATION_DISTRICT} user type).
     */
    @Override
    public boolean getDistrictData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_LOCATION_DISTRICT), settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH District Data.", e);
        }
        LOGGER.debug("writing RCH District response to file");
        File responseFile = generateResponseFile(result, RchUserType.DISTRICT, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH district response successfully written to file. Copying to remote directory.");
            try {
                scpResponseToRemote(responseFile.getName());
                LOGGER.info("RCH district response file successfully copied to remote server");
                RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.DISTRICT, LocalDate.now());
                rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator);
                status = true;
            } catch (ExecutionException e) {
                LOGGER.error(SCP_ERROR, e);
            } catch (RchFileManipulationException e) {
                LOGGER.error("invalid file name", e);
            }
        } else {
            LOGGER.error("Error writing response to file.");
        }
        return status;
    }

    /** Same sequence as {@link #getDistrictData}, for taluka location data. */
    @Override
    public boolean getTalukasData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_LOCATION_TALUKA), settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH Taluka Data.", e);
        }
        LOGGER.debug("writing RCH taluka response to file");
        File responseFile = generateResponseFile(result, RchUserType.TALUKA, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH taluka response successfully written to file. Copying to remote directory.");
            try {
                scpResponseToRemote(responseFile.getName());
                LOGGER.info("RCH taluka response file successfully copied to remote server");
                RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.TALUKA, LocalDate.now());
                rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator);
                status = true;
            } catch (ExecutionException e) {
                LOGGER.error(SCP_ERROR, e);
            } catch (RchFileManipulationException e) {
                LOGGER.error("invalid file name", e);
            }
        } else {
            LOGGER.error("Error writing response to file.");
        }
        return status;
    }

    /** Same sequence as {@link #getDistrictData}, for village location data. */
    @Override
    public boolean getVillagesData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_LOCATION_VILLAGE), settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH Village Data.", e);
        }
        LOGGER.debug("writing RCH Village response to file");
        File responseFile = generateResponseFile(result, RchUserType.VILLAGE, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH Village response successfully written to file. Copying to remote directory.");
            try {
                scpResponseToRemote(responseFile.getName());
                LOGGER.info("RCH Village response file successfully copied to remote server");
                RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.VILLAGE, LocalDate.now());
                rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator);
                status = true;
            } catch (ExecutionException e) {
                LOGGER.error(SCP_ERROR, e);
            } catch (RchFileManipulationException e) {
                LOGGER.error("invalid file name", e);
            }
        } else {
            LOGGER.error("Error writing response to file.");
        }
        return status;
    }

    /**
     * Entry point of the location-read chain: fans out one district-read event per state.
     * The district/taluka/... listeners then chain to the next location level in their
     * {@code finally} blocks.
     */
    @MotechListener(subjects = Constants.RCH_LOCATION_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity
    @Transactional
    public void readLocationResponse(MotechEvent event) throws RchFileManipulationException {
        LOGGER.info("Starting location read.");
        List<Long> stateIds = getStateIds();
        for (Long stateId : stateIds) {
            Map<String, Object> eventParams = new HashMap<>();
            eventParams.put(Constants.STATE_ID_PARAM, stateId);
            eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_DISTRICT_READ_SUBJECT, eventParams));
        }
    }

    /**
     * Copies today's district response file(s) for the given state back from the remote server,
     * unmarshals them and upserts district locations in parts of {@link #LOCATION_PART_SIZE}.
     * Always chains to the taluka read (finally block), even on failure.
     */
    @MotechListener(subjects = Constants.RCH_DISTRICT_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity
    @Transactional
    public void readDistrictResponseFromFile(MotechEvent event) throws RchFileManipulationException {
        Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM);
        LOGGER.info("Copying RCH district response file from remote server to local directory.");
        try {
            List<RchImportFacilitator> rchImportFacilitatorsDistricts = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.DISTRICT);
            for (RchImportFacilitator rchImportFacilitatorsDistrict : rchImportFacilitatorsDistricts) {
                ArrayList<Map<String, Object>> districtArrList = new ArrayList<>();
                File localResponseFile = scpResponseToLocal(rchImportFacilitatorsDistrict.getFileName());
                if (localResponseFile != null) {
                    LOGGER.info("RCH district response file successfully copied from remote server to local directory.");
                    String result = readResponsesFromXml(localResponseFile);
                    State state = stateDataService.findByCode(stateId);
                    String stateName = state.getName() != null ? state.getName() : " ";
                    Long stateCode = state.getCode() != null ? state.getCode() : 1L;
                    LocalDate startDate = rchImportFacilitatorsDistrict.getStartDate();
                    LocalDate endDate = rchImportFacilitatorsDistrict.getEndDate();
                    try {
                        if (result.contains(RECORDS)) {
                            // NOTE(review): the (result == null) guard is dead code here —
                            // result.contains(...) above would already have thrown on null.
                            RchDistrictDataSet districtDataSet = (result == null) ? null : (RchDistrictDataSet) MarshallUtils.unmarshall(result, RchDistrictDataSet.class);
                            LOGGER.info("Starting RCH district import");
                            // NOTE(review): stopWatch is started but never stopped/read in this span.
                            StopWatch stopWatch = new StopWatch();
                            stopWatch.start();
                            if (districtDataSet == null || districtDataSet.getRecords() == null) {
                                String warning = String.format("No district data set received from RCH for %s state", stateName);
                                LOGGER.warn(warning);
                                rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.DISTRICT, stateCode, stateName, 0, 0, warning));
                            } else {
                                List<RchDistrictRecord> districtRecords = districtDataSet.getRecords();
                                for (RchDistrictRecord record : districtRecords) {
                                    Map<String, Object> locMap = new HashMap<>();
                                    toMapDistrict(locMap, record, stateCode);
                                    districtArrList.add(locMap);
                                }
                            }
                            // Push the accumulated records to the DB in fixed-size parts.
                            int count = 0;
                            int partNumber = 0;
                            Long totalUpdatedRecords = 0L;
                            while (count < districtArrList.size()) {
                                List<Map<String, Object>> recordListPart = new ArrayList<>();
                                while (recordListPart.size() < LOCATION_PART_SIZE && count < districtArrList.size()) {
                                    recordListPart.add(districtArrList.get(count));
                                    count++;
                                }
                                partNumber++;
                                totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.DISTRICT, rchImportFacilitatorsDistrict.getFileName(), partNumber);
                                recordListPart.clear();
                            }
                            LOGGER.debug("File {} processed. {} records updated", rchImportFacilitatorsDistrict.getFileName(), totalUpdatedRecords);
                        } else {
                            String warning = String.format("No district data set received from RCH for %d stateId", stateId);
                            LOGGER.warn(warning);
                        }
                    } catch (JAXBException e) {
                        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH district data from %s location.", stateId), e);
                    } catch (RchInvalidResponseStructureException e) {
                        String error = String.format("Cannot read RCH districts data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId);
                        LOGGER.error(error, e);
                        alertService.create(RCH_WEB_SERVICE, "RCH Web Service District Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null);
                        rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.DISTRICT, stateCode, stateName, 0, 0, error));
                        rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.DISTRICT, stateId));
                    } catch (NullPointerException e) {
                        // NOTE(review): catching NPE hides the real defect; prefer explicit null checks.
                        LOGGER.error("No files saved a : ", e);
                    }
                }
            }
        } catch (ExecutionException e) {
            // NOTE(review): string concatenation with the exception drops the stack trace;
            // prefer LOGGER.error(msg, e).
            LOGGER.error("Failed to copy file from remote server to local directory." + e);
        } finally {
            // Chain to the taluka read for this state regardless of outcome.
            Map<String, Object> eventParams = new HashMap<>();
            eventParams.put(Constants.STATE_ID_PARAM, stateId);
            eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_TALUKA_READ_SUBJECT, eventParams));
        }
    }

    /**
     * Taluka analogue of {@link #readDistrictResponseFromFile}; chains to the health-block read
     * in its finally block.
     */
    @MotechListener(subjects = Constants.RCH_TALUKA_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity
    @Transactional
    public void readTalukaResponseFromFile(MotechEvent event) throws RchFileManipulationException {
        LOGGER.info("Copying RCH taluka response file from remote server to local directory.");
        Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM);
        try {
            List<RchImportFacilitator> rchImportFacilitatorsTalukas = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.TALUKA);
            for (RchImportFacilitator rchImportFacilitatorsTaluka : rchImportFacilitatorsTalukas) {
                ArrayList<Map<String, Object>> talukaArrList = new ArrayList<>();
                File localResponseFile = scpResponseToLocal(rchImportFacilitatorsTaluka.getFileName());
                if (localResponseFile != null) {
                    LOGGER.info("RCH Taluka response file successfully copied from remote server to local directory.");
                    String result = readResponsesFromXml(localResponseFile);
                    State state = stateDataService.findByCode(stateId);
                    String stateName = state.getName() != null ? state.getName() : " ";
                    Long stateCode = state.getCode() != null ? state.getCode() : 1L;
                    LocalDate startDate = rchImportFacilitatorsTaluka.getStartDate();
                    LocalDate endDate = rchImportFacilitatorsTaluka.getEndDate();
                    try {
                        if (result.contains(RECORDS)) {
                            // NOTE(review): dead (result == null) guard, as in the district reader.
                            RchTalukaDataSet talukaDataSet = (result == null) ? null : (RchTalukaDataSet) MarshallUtils.unmarshall(result, RchTalukaDataSet.class);
                            LOGGER.info("Starting RCH taluka import");
                            StopWatch stopWatch = new StopWatch();
                            stopWatch.start();
                            if (talukaDataSet == null || talukaDataSet.getRecords() == null) {
                                String warning = String.format("No taluka data set received from RCH for %s state", stateName);
                                LOGGER.warn(warning);
                                rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.TALUKA, stateCode, stateName, 0, 0, warning));
                            } else {
                                List<RchTalukaRecord> talukaRecords = talukaDataSet.getRecords();
                                for (RchTalukaRecord record : talukaRecords) {
                                    Map<String, Object> locMap = new HashMap<>();
                                    toMapTaluka(locMap, record, stateCode);
                                    talukaArrList.add(locMap);
                                }
                            }
                            int count = 0;
                            int partNumber = 0;
                            Long totalUpdatedRecords = 0L;
                            while (count < talukaArrList.size()) {
                                List<Map<String, Object>> recordListPart = new ArrayList<>();
                                while (recordListPart.size() < LOCATION_PART_SIZE && count < talukaArrList.size()) {
                                    recordListPart.add(talukaArrList.get(count));
                                    count++;
                                }
                                partNumber++;
                                totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.TALUKA, rchImportFacilitatorsTaluka.getFileName(), partNumber);
                                recordListPart.clear();
                            }
                            LOGGER.debug("File {} processed. {} records updated", rchImportFacilitatorsTaluka.getFileName(), totalUpdatedRecords);
                        } else {
                            String warning = String.format("No Taluka data set received from RCH for %d stateId", stateId);
                            LOGGER.warn(warning);
                        }
                    } catch (JAXBException e) {
                        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH taluka data from %s location.", stateId), e);
                    } catch (RchInvalidResponseStructureException e) {
                        String error = String.format("Cannot read RCH taluka data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId);
                        LOGGER.error(error, e);
                        alertService.create(RCH_WEB_SERVICE, "RCH Web Service taluka Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null);
                        rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.TALUKA, stateCode, stateName, 0, 0, error));
                        rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.TALUKA, stateId));
                    } catch (NullPointerException e) {
                        LOGGER.error("No files saved a : ", e);
                    }
                }
            }
        } catch (ExecutionException e) {
            LOGGER.error("Failed to copy file from remote server to local directory." + e);
        } finally {
            Map<String, Object> eventParams = new HashMap<>();
            eventParams.put(Constants.STATE_ID_PARAM, stateId);
            eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_HEALTHBLOCK_READ_SUBJECT, eventParams));
        }
    }

    /**
     * Village analogue of {@link #readDistrictResponseFromFile}.
     * NOTE: this method continues beyond the current view; the text below is its visible prefix.
     */
    @MotechListener(subjects = Constants.RCH_VILLAGE_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity
    @Transactional
    public void readVillageResponseFromFile(MotechEvent event) throws RchFileManipulationException {
        Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM);
        LOGGER.info("Copying RCH village response file from remote server to local directory.");
        try {
            List<RchImportFacilitator> rchImportFacilitatorsVillages = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.VILLAGE);
            for (RchImportFacilitator rchImportFacilitatorsVillage : rchImportFacilitatorsVillages) {
                ArrayList<Map<String, Object>> villageArrList = new ArrayList<>();
                File localResponseFile = scpResponseToLocal(rchImportFacilitatorsVillage.getFileName());
                if (localResponseFile != null) {
                    LOGGER.info("RCH village response file successfully copied from remote server to local directory.");
                    String result = readResponsesFromXml(localResponseFile);
                    State state = stateDataService.findByCode(stateId);
                    String stateName = state.getName() != null ? state.getName() : " ";
                    Long stateCode = state.getCode() != null ? state.getCode() : 1L;
                    LocalDate startDate = rchImportFacilitatorsVillage.getStartDate();
                    LocalDate endDate = rchImportFacilitatorsVillage.getEndDate();
                    try {
                        if (result.contains(RECORDS)) {
                            RchVillageDataSet villageDataSet = (result == null) ? null : (RchVillageDataSet) MarshallUtils.unmarshall(result, RchVillageDataSet.class);
                            LOGGER.info("Starting RCH village import");
                            StopWatch stopWatch = new StopWatch();
                            stopWatch.start();
                            if (villageDataSet == null || villageDataSet.getRecords() == null) {
                                String warning = String.format("No village data set received from RCH for %s state", stateName);
                                LOGGER.warn(warning);
                                rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.VILLAGE, stateCode, stateName, 0, 0, warning));
                            } else {
                                List<RchVillageRecord> villageRecords = villageDataSet.getRecords();
                                for (RchVillageRecord record : villageRecords) {
                                    Map<String, Object> locMap = new HashMap<>();
                                    toMapVillage(locMap, record, stateCode);
                                    villageArrList.add(locMap);
                                }
                            }
                            int count = 0;
                            int partNumber = 0;
                            Long totalUpdatedRecords = 0L;
                            while (count < villageArrList.size()) {
                                List<Map<String, Object>> recordListPart = new ArrayList<>();
                                while (recordListPart.size() < LOCATION_PART_SIZE && count < villageArrList.size()) {
                                    recordListPart.add(villageArrList.get(count));
                                    count++;
                                }
                                partNumber++;
                                totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.VILLAGE, rchImportFacilitatorsVillage.getFileName(), partNumber);
                                recordListPart.clear();
                            }
                            LOGGER.debug("File {} processed. {} records updated", rchImportFacilitatorsVillage.getFileName(), totalUpdatedRecords);
                        } else {
                            String warning = String.format("No Village data set received from RCH for %d stateId", stateId);
                            LOGGER.warn(warning);
                        }
                    } catch (JAXBException e) {
                        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH Village data from %s location.", stateId), e);
                    } catch (RchInvalidResponseStructureException e) {
                        String error = String.format("Cannot read RCH Village data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId);
                        LOGGER.error(error, e);
                        alertService.create(RCH_WEB_SERVICE, "RCH Web Service Village Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null);
                        rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.VILLAGE, stateCode, stateName, 0, 0, error));
                        rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.VILLAGE, stateId));
                    } catch (NullPointerException e) {
                        LOGGER.error("No files saved a : ", e);
                    }
                }
            }
        } catch (ExecutionException e) {
            LOGGER.error("Failed to copy file from remote server to local directory."
+ e); } finally { Map<String, Object> eventParams = new HashMap<>(); eventParams.put(Constants.STATE_ID_PARAM, stateId); eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_VILLAGE_HEALTHSUBFACILITY_READ_SUBJECT, eventParams)); } } @MotechListener(subjects = Constants.RCH_MOTHER_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity @Transactional public void readMotherResponse(MotechEvent event) throws RchFileManipulationException { LOGGER.info("Starting Mother read."); List<Long> stateIds = getStateIds(); for (Long stateId : stateIds ) { Map<String, Object> eventParams = new HashMap<>(); eventParams.put(Constants.STATE_ID_PARAM, stateId); eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_MOTHER_READ, eventParams)); } } @MotechListener(subjects = Constants.RCH_MOTHER_READ) //NO CHECKSTYLE Cyclomatic Complexity public void readMotherResponseFromFile(MotechEvent event) throws RchFileManipulationException { Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM); try { LOGGER.info("Copying RCH mother response file from remote server to local directory."); List<RchImportFacilitator> rchImportFacilitatorMothers = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.MOTHER); for (RchImportFacilitator rchImportFacilitatorMother: rchImportFacilitatorMothers ) { File localResponseFile = scpResponseToLocal(rchImportFacilitatorMother.getFileName()); if (localResponseFile != null) { LOGGER.info("RCH Mother response file successfully copied from remote server to local directory."); String result = readResponsesFromXml(localResponseFile); State state = stateDataService.findByCode(stateId); String stateName = state.getName() != null ? state.getName() : " "; Long stateCode = state.getCode() != null ? 
state.getCode() : 1L; LocalDate startDate = rchImportFacilitatorMother.getStartDate(); LocalDate endDate = rchImportFacilitatorMother.getEndDate(); try { if (result.contains(RECORDS)) { RchMothersDataSet mothersDataSet = (result == null) ? null : (RchMothersDataSet) MarshallUtils.unmarshall(result, RchMothersDataSet.class); LOGGER.info("Starting RCH mother import"); StopWatch stopWatch = new StopWatch(); stopWatch.start(); if (mothersDataSet == null || mothersDataSet.getRecords() == null) { String warning = String.format("No mother data set received from RCH for %s state", stateName); LOGGER.warn(warning); rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.MOTHER, stateCode, stateName, 0, 0, warning)); } else { LOGGER.info("Received {} mother records from RCH for {} state", sizeNullSafe(mothersDataSet.getRecords()), stateName); RchImportAudit audit = saveImportedMothersData(mothersDataSet, stateName, stateCode, startDate, endDate); rchImportAuditDataService.create(audit); stopWatch.stop(); double seconds = stopWatch.getTime() / THOUSAND; LOGGER.info("Finished RCH mother import dispatch in {} seconds. Accepted {} mothers, Rejected {} mothers", seconds, audit.getAccepted(), audit.getRejected()); LOGGER.info("fromDate for delete {} {}", startDate, endDate); deleteRchImportFailRecords(startDate, endDate, RchUserType.MOTHER, stateId); } } else { String warning = String.format("No Mother data set received from RCH for %d stateId", stateId); LOGGER.warn(warning); } } catch (JAXBException e) { throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH mother data from %s location.", stateId), e); } catch (RchInvalidResponseStructureException e) { String error = String.format("Cannot read RCH mothers data from %s state with stateId: %d. 
Response Deserialization Error", stateName, stateId); LOGGER.error(error, e); alertService.create(RCH_WEB_SERVICE, "RCH Web Service Mother Import", e .getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null); rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.MOTHER, stateCode, stateName, 0, 0, error)); rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.MOTHER, stateId)); } catch (NullPointerException e) { LOGGER.error("No files saved a : ", e); } } } } catch (ExecutionException e) { LOGGER.error("Failed to copy file from remote server to local directory." + e); } } @Override public boolean getChildrenData(LocalDate from, LocalDate to, URL endpoint, Long stateId) { DS_DataResponseDS_DataResult result; Irchwebservices dataService = getService(endpoint); boolean status = false; try { result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID), settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(), settingsFacade.getProperty(Constants.RCH_CHILD_USER), settingsFacade.getProperty(Constants.RCH_DTID)); } catch (RemoteException e) { throw new RchWebServiceException("Remote Server Error. Could Not Read RCH Children Data.", e); } LOGGER.debug("writing RCH children response to file"); File responseFile = generateResponseFile(result, RchUserType.CHILD, stateId); if (responseFile != null) { LOGGER.info("RCH children response successfully written to file. 
Copying to remote directory."); try { scpResponseToRemote(responseFile.getName()); LOGGER.info("RCH children response file successfully copied to remote server"); RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.CHILD, LocalDate.now()); rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator); status = true; } catch (ExecutionException e) { LOGGER.error(SCP_ERROR, e); } catch (RchFileManipulationException e) { LOGGER.error("invalid file error"); } } else { LOGGER.error("Error writing response to file."); } return status; } @MotechListener(subjects = Constants.RCH_CHILD_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity @Transactional public void readChildResponse(MotechEvent event) throws RchFileManipulationException { LOGGER.info("Starting Child read."); List<Long> stateIds = getStateIds(); for (Long stateId : stateIds ) { Map<String, Object> eventParams = new HashMap<>(); eventParams.put(Constants.STATE_ID_PARAM, stateId); eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_CHILD_READ, eventParams)); } } @MotechListener(subjects = Constants.RCH_CHILD_READ) public void readChildResponseFromFile(MotechEvent event) throws RchFileManipulationException { Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM); LOGGER.info("Copying RCH child response file from remote server to local directory."); try { List<RchImportFacilitator> rchImportFacilitatorChildren = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.CHILD); for (RchImportFacilitator rchImportFacilitatorChild: rchImportFacilitatorChildren ) { File localResponseFile = scpResponseToLocal(rchImportFacilitatorChild.getFileName()); String result = readResponsesFromXml(localResponseFile); State state = stateDataService.findByCode(stateId); String stateName = state.getName(); Long stateCode = state.getCode(); LocalDate startReferenceDate = 
rchImportFacilitatorChild.getStartDate(); LocalDate endReferenceDate = rchImportFacilitatorChild.getEndDate(); try { if (result.contains(RECORDS)) { RchChildrenDataSet childrenDataSet = (result == null) ? null : (RchChildrenDataSet) MarshallUtils.unmarshall(result, RchChildrenDataSet.class); LOGGER.info("Starting RCH children import for stateId: {}", stateId); StopWatch stopWatch = new StopWatch(); stopWatch.start(); if (childrenDataSet == null || childrenDataSet.getRecords() == null) { String warning = String.format("No child data set received from RCH for %s state", stateName); LOGGER.warn(warning); rchImportAuditDataService.create(new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.CHILD, stateCode, stateName, 0, 0, warning)); } else { LOGGER.info("Received {} children records from RCH for {} state", sizeNullSafe(childrenDataSet.getRecords()), stateName); RchImportAudit audit = saveImportedChildrenData(childrenDataSet, stateName, stateCode, startReferenceDate, endReferenceDate); rchImportAuditDataService.create(audit); stopWatch.stop(); double seconds = stopWatch.getTime() / THOUSAND; LOGGER.info("Finished children import dispatch in {} seconds. Accepted {} children, Rejected {} children", seconds, audit.getAccepted(), audit.getRejected()); // Delete RchImportFailRecords once import is successful deleteRchImportFailRecords(startReferenceDate, endReferenceDate, RchUserType.CHILD, stateId); } } else { String warning = String.format("No Child data set received from RCH for %d stateId", stateId); LOGGER.warn(warning); } } catch (JAXBException e) { throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH children data from %s location.", stateId), e); } catch (RchInvalidResponseStructureException e) { String error = String.format("Cannot read RCH children data from %s state with stateId:%d. 
Response Deserialization Error", stateName, stateCode); LOGGER.error(error, e); alertService.create(RCH_WEB_SERVICE, "RCH Web Service Child Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null); rchImportAuditDataService.create(new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.CHILD, stateCode, stateName, 0, 0, error)); rchImportFailRecordDataService.create(new RchImportFailRecord(endReferenceDate, RchUserType.CHILD, stateId)); } catch (NullPointerException e) { LOGGER.error("No files saved b : ", e); } } } catch (ExecutionException e) { LOGGER.error("Failed to copy response file from remote server to local directory."); } } @Override public boolean getAnmAshaData(LocalDate from, LocalDate to, URL endpoint, Long stateId) { DS_DataResponseDS_DataResult result; Irchwebservices dataService = getService(endpoint); boolean status = false; try { result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID), settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(), settingsFacade.getProperty(Constants.RCH_ASHA_USER), settingsFacade.getProperty(Constants.RCH_DTID)); } catch (RemoteException e) { throw new RchWebServiceException("Remote Server Error. Could Not Read RCH FLW Data.", e); } LOGGER.debug("writing RCH Asha response to file"); File responseFile = generateResponseFile(result, RchUserType.ASHA, stateId); if (responseFile != null) { LOGGER.info("RCH asha response successfully written to file. 
Copying to remote directory."); try { scpResponseToRemote(responseFile.getName()); LOGGER.info("RCH asha response file successfully copied to remote server"); RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.ASHA, LocalDate.now()); rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator); status = true; } catch (ExecutionException e) { LOGGER.error(SCP_ERROR,e); } catch (RchFileManipulationException e) { LOGGER.error("invalid file error",e); } } else { LOGGER.error("Error writing response to file."); } return status; } @MotechListener(subjects = Constants.RCH_ASHA_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity @Transactional public void readASHAResponse(MotechEvent event) throws RchFileManipulationException { LOGGER.info("Starting Asha read."); List<Long> stateIds = getStateIds(); for (Long stateId : stateIds ) { Map<String, Object> eventParams = new HashMap<>(); eventParams.put(Constants.STATE_ID_PARAM, stateId); eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_ASHA_READ, eventParams)); } } @MotechListener(subjects = Constants.RCH_ASHA_READ) public void readAshaResponseFromFile(MotechEvent event) throws RchFileManipulationException { Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM); LOGGER.info("RCH Asha file import entry point"); LOGGER.info("Copying RCH Asha response file from remote server to local directory."); try { List<RchImportFacilitator> rchImportFacilitatorAshas = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.ASHA); for (RchImportFacilitator rchImportFacilitatorAsha: rchImportFacilitatorAshas ) { File localResponseFile = scpResponseToLocal(rchImportFacilitatorAsha.getFileName()); String result = readResponsesFromXml(localResponseFile); State importState = stateDataService.findByCode(stateId); String stateName = importState.getName(); Long stateCode = 
importState.getCode(); LocalDate startReferenceDate = rchImportFacilitatorAsha.getStartDate(); LocalDate endReferenceDate = rchImportFacilitatorAsha.getEndDate(); try { if (result.contains(RECORDS)) { RchAnmAshaDataSet ashaDataSet = (result == null) ? null : (RchAnmAshaDataSet) MarshallUtils.unmarshall(result, RchAnmAshaDataSet.class); LOGGER.info("Starting RCH FLW import for stateId: {}", stateId); StopWatch stopWatch = new StopWatch(); stopWatch.start(); if (ashaDataSet == null || ashaDataSet.getRecords() == null) { String warning = String.format("No FLW data set received from RCH for %s state", stateName); LOGGER.warn(warning); rchImportAuditDataService.create(new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.ASHA, stateCode, stateName, 0, 0, warning)); } else { LOGGER.info("Received {} FLW records from RCH for {} state", sizeNullSafe(ashaDataSet.getRecords()), stateName); RchImportAudit audit = saveImportedAshaData(ashaDataSet, stateName, stateCode, startReferenceDate, endReferenceDate); rchImportAuditDataService.create(audit); stopWatch.stop(); double seconds = stopWatch.getTime() / THOUSAND; LOGGER.info("Finished RCH FLW import dispatch in {} seconds. Accepted {} Ashas, Rejected {} Ashas", seconds, audit.getAccepted(), audit.getRejected()); // Delete RchImportFailRecords once import is successful deleteRchImportFailRecords(startReferenceDate, endReferenceDate, RchUserType.ASHA, stateId); } } else { String warning = String.format("No Asha data set received from RCH for %d stateId", stateId); LOGGER.warn(warning); } } catch (JAXBException e) { throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH FLW data from %s location.", stateId), e); } catch (RchInvalidResponseStructureException e) { String error = String.format("Cannot read RCH FLW data from %s state with stateId:%d. 
Response Deserialization Error", stateName, stateCode); LOGGER.error(error, e); alertService.create(RCH_WEB_SERVICE, "RCH Web Service FLW Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null); rchImportAuditDataService.create(new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.ASHA, stateCode, stateName, 0, 0, error)); rchImportFailRecordDataService.create(new RchImportFailRecord(endReferenceDate, RchUserType.ASHA, stateId)); } catch (NullPointerException e) { LOGGER.error("No files saved c : ", e); } } } catch (ExecutionException e) { LOGGER.error("Failed to copy response file from remote server to local directory."); } } @Override public boolean getHealthBlockData(LocalDate from, LocalDate to, URL endpoint, Long stateId) { DS_DataResponseDS_DataResult result; Irchwebservices dataService = getService(endpoint); boolean status = false; LOGGER.info(FROM_DATE_LOG, from); try { result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID), settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(), settingsFacade.getProperty(Constants.RCH_LOCATION_HEALTHBLOCK), settingsFacade.getProperty(Constants.RCH_DTID)); } catch (RemoteException e) { throw new RchWebServiceException("Remote Server Error. Could Not Read RCH healthblock Data.", e); } LOGGER.debug("writing RCH taluka response to file"); File responseFile = generateResponseFile(result, RchUserType.HEALTHBLOCK, stateId); if (responseFile != null) { LOGGER.info("RCH healthblock response successfully written to file. 
Copying to remote directory."); try { scpResponseToRemote(responseFile.getName()); LOGGER.info("RCH healthblock response file successfully copied to remote server"); RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.HEALTHBLOCK, LocalDate.now()); rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator); status = true; } catch (ExecutionException e) { LOGGER.error(SCP_ERROR, e); } catch (RchFileManipulationException e) { LOGGER.error("invalid file name", e); } } else { LOGGER.error("Error writing response to file."); } return status; } @MotechListener(subjects = Constants.RCH_HEALTHBLOCK_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity @Transactional public void readHealthBlockResponseFromFile(MotechEvent event) throws RchFileManipulationException { LOGGER.info("Copying RCH healthblock response file from remote server to local directory."); Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM); try { List<RchImportFacilitator> rchImportFacilitatorsHealthBlocks = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.HEALTHBLOCK); for (RchImportFacilitator rchImportFacilitatorsHealthBlock: rchImportFacilitatorsHealthBlocks ) { ArrayList<Map<String, Object>> healthBlockArrList = new ArrayList<>(); File localResponseFile = scpResponseToLocal(rchImportFacilitatorsHealthBlock.getFileName()); if (localResponseFile != null) { LOGGER.info("RCH healthblock response file successfully copied from remote server to local directory."); String result = readResponsesFromXml(localResponseFile); LOGGER.debug("stateId={}", stateId); State state = stateDataService.findByCode(stateId); String stateName = state.getName() != null ? state.getName() : " "; Long stateCode = state.getCode() != null ? 
state.getCode() : 1L; LOGGER.debug("stateCode={}", stateCode); LocalDate startDate = rchImportFacilitatorsHealthBlock.getStartDate(); LocalDate endDate = rchImportFacilitatorsHealthBlock.getEndDate(); try { if (result.contains(RECORDS)) { RchHealthBlockDataSet healthBlockDataSet = (result == null) ? null : (RchHealthBlockDataSet) MarshallUtils.unmarshall(result, RchHealthBlockDataSet.class); LOGGER.info("Starting RCH healthblock import"); StopWatch stopWatch = new StopWatch(); stopWatch.start(); if (healthBlockDataSet == null || healthBlockDataSet.getRecords() == null) { String warning = String.format("No healthblock data set received from RCH for %s state", stateName); LOGGER.warn(warning); rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.HEALTHBLOCK, stateCode, stateName, 0, 0, warning)); } else { List<RchHealthBlockRecord> rchHealthBlockRecords = healthBlockDataSet.getRecords(); for (RchHealthBlockRecord record : rchHealthBlockRecords) { Map<String, Object> locMap = new HashMap<>(); toMapHealthBlock(locMap, record, stateCode); healthBlockArrList.add(locMap); } } int count = 0; int partNumber = 0; Long totalUpdatedRecords = 0L; while (count < healthBlockArrList.size()) { List<Map<String, Object>> recordListPart = new ArrayList<>(); while (recordListPart.size() < LOCATION_PART_SIZE && count < healthBlockArrList.size()) { recordListPart.add(healthBlockArrList.get(count)); count++; } partNumber++; totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.HEALTHBLOCK, rchImportFacilitatorsHealthBlock.getFileName(), partNumber); recordListPart.clear(); } LOGGER.debug("File {} processed. 
{} records updated", rchImportFacilitatorsHealthBlock.getFileName(), totalUpdatedRecords); } else { String warning = String.format("No HealthBlock data set received from RCH for %d stateId", stateId); LOGGER.warn(warning); } } catch (JAXBException e) { throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH mother data from %s location.", stateId), e); } catch (RchInvalidResponseStructureException e) { String error = String.format("Cannot read RCH mothers data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId); LOGGER.error(error, e); alertService.create(RCH_WEB_SERVICE, "RCH Web Service Mother Import", e .getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null); rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.MOTHER, stateCode, stateName, 0, 0, error)); rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.MOTHER, stateId)); } catch (NullPointerException e) { LOGGER.error("No files saved a : ", e); } } } } catch (ExecutionException e) { LOGGER.error("Failed to copy file from remote server to local directory." 
+ e); } finally { Map<String, Object> eventParams = new HashMap<>(); eventParams.put(Constants.STATE_ID_PARAM, stateId); eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_TALUKA_HEALTHBLOCK_READ_SUBJECT, eventParams)); } } @Override public boolean getTalukaHealthBlockData(LocalDate from, LocalDate to, URL endpoint, Long stateId) { DS_DataResponseDS_DataResult result; Irchwebservices dataService = getService(endpoint); boolean status = false; LOGGER.info(FROM_DATE_LOG, from); try { result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID), settingsFacade.getProperty(Constants.RCH_USER_ID), settingsFacade.getProperty(Constants.RCH_PASSWORD), from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(), settingsFacade.getProperty(Constants.RCH_LOCATION_TALUKA_HEALTHBLOCK), settingsFacade.getProperty(Constants.RCH_DTID)); } catch (RemoteException e) { throw new RchWebServiceException("Remote Server Error. Could Not Read RCH taluka-healthblock Data.", e); } LOGGER.debug("writing RCH taluka-healthblock response to file"); File responseFile = generateResponseFile(result, RchUserType.TALUKAHEALTHBLOCK, stateId); if (responseFile != null) { LOGGER.info("RCH taluka-healthblock response successfully written to file. 
Copying to remote directory."); try { scpResponseToRemote(responseFile.getName()); LOGGER.info("RCH taluka-healthBlock response file successfully copied to remote server"); RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.TALUKAHEALTHBLOCK, LocalDate.now()); rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator); status = true; } catch (ExecutionException e) { LOGGER.error(SCP_ERROR, e); } catch (RchFileManipulationException e) { LOGGER.error("invalid file name", e); } } else { LOGGER.error("Error writing response to file."); } return status; } @MotechListener(subjects = Constants.RCH_TALUKA_HEALTHBLOCK_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity @Transactional public void readTalukaHealthBlockResponseFromFile(MotechEvent event) throws RchFileManipulationException { LOGGER.info("Copying RCH taluka-healthblock response file from remote server to local directory."); Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM); try { List<RchImportFacilitator> rchImportFacilitatorsTalukaHealthBlocks = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.TALUKAHEALTHBLOCK); for (RchImportFacilitator rchImportFacilitatorsTalukaHealthBlock: rchImportFacilitatorsTalukaHealthBlocks ) { ArrayList<Map<String, Object>> talukaHealthBlockArrList = new ArrayList<>(); File localResponseFile = scpResponseToLocal(rchImportFacilitatorsTalukaHealthBlock.getFileName()); if (localResponseFile != null) { LOGGER.info("RCH Taluka-healthblock response file successfully copied from remote server to local directory."); String result = readResponsesFromXml(localResponseFile); State state = stateDataService.findByCode(stateId); String stateName = state.getName() != null ? state.getName() : " "; Long stateCode = state.getCode() != null ? 
state.getCode() : 1L; LocalDate startDate = rchImportFacilitatorsTalukaHealthBlock.getStartDate(); LocalDate endDate = rchImportFacilitatorsTalukaHealthBlock.getEndDate(); try { if (result.contains(RECORDS)) { RchTalukaHealthBlockDataSet talukaHealthBlockDataSet = (result == null) ? null : (RchTalukaHealthBlockDataSet) MarshallUtils.unmarshall(result, RchTalukaHealthBlockDataSet.class); LOGGER.info("Starting RCH taluka-healthBlock import"); StopWatch stopWatch = new StopWatch(); stopWatch.start(); if (talukaHealthBlockDataSet == null || talukaHealthBlockDataSet.getRecords() == null) { String warning = String.format("No taluka-healthBlock data set received from RCH for %s state", stateName); LOGGER.warn(warning); rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.TALUKAHEALTHBLOCK, stateCode, stateName, 0, 0, warning)); } else { List<RchTalukaHealthBlockRecord> rchTalukaHealthBlockRecords = talukaHealthBlockDataSet.getRecords(); for (RchTalukaHealthBlockRecord record : rchTalukaHealthBlockRecords) { Map<String, Object> locMap = new HashMap<>(); toMapTalukaHealthBlock(locMap, record, stateCode); talukaHealthBlockArrList.add(locMap); } } int count = 0; int partNumber = 0; Long totalUpdatedRecords = 0L; while (count < talukaHealthBlockArrList.size()) { List<Map<String, Object>> recordListPart = new ArrayList<>(); while (recordListPart.size() < LOCATION_PART_SIZE && count < talukaHealthBlockArrList.size()) { recordListPart.add(talukaHealthBlockArrList.get(count)); count++; } partNumber++; totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.TALUKAHEALTHBLOCK, rchImportFacilitatorsTalukaHealthBlock.getFileName(), partNumber); recordListPart.clear(); } LOGGER.debug("File {} processed. 
{} records updated", rchImportFacilitatorsTalukaHealthBlock.getFileName(), totalUpdatedRecords); } else { String warning = String.format("No Taluka-HealthBlock data set received from RCH for %d stateId", stateId); LOGGER.warn(warning); } } catch (JAXBException e) { throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH mother data from %s location.", stateId), e); } catch (RchInvalidResponseStructureException e) { String error = String.format("Cannot read RCH taluka healthblock data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId); LOGGER.error(error, e); alertService.create(RCH_WEB_SERVICE, "RCH Web Service Mother Import", e .getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null); rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.TALUKAHEALTHBLOCK, stateCode, stateName, 0, 0, error)); rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.TALUKAHEALTHBLOCK, stateId)); } catch (NullPointerException e) { LOGGER.error("No files saved a : ", e); } } } } catch (ExecutionException e) { LOGGER.error("Failed to copy file from remote server to local directory." 
+ e);
        } finally {
            // Always chain to the next location-import stage, even on failure,
            // so the pipeline for this state keeps moving.
            Map<String, Object> eventParams = new HashMap<>();
            eventParams.put(Constants.STATE_ID_PARAM, stateId);
            eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_HEALTHFACILITY_READ_SUBJECT, eventParams));
        }
    }

    /**
     * Pulls health-facility location data from the RCH web service for one state
     * and date range, writes the SOAP result to a response file and copies it to
     * the remote server, recording an import-facilitator audit entry on success.
     *
     * @param from     start date of the requested period
     * @param to       end date of the requested period
     * @param endpoint RCH web-service endpoint URL
     * @param stateId  state whose health-facility data is requested
     * @return true when the response file was written and copied successfully
     */
    @Override
    public boolean getHealthFacilityData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID),
                    settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD),
                    from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_LOCATION_HEALTHFACILITY),
                    settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH healthfacility Data.", e);
        }
        LOGGER.debug("writing RCH healthfacility response to file");
        File responseFile = generateResponseFile(result, RchUserType.HEALTHFACILITY, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH healthfacility response successfully written to file. Copying to remote directory.");
            try {
                scpResponseToRemote(responseFile.getName());
                LOGGER.info("RCH healthfacility response file successfully copied to remote server");
                // Audit entry is what the later read-from-file stage uses to find this response.
                RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.HEALTHFACILITY, LocalDate.now());
                rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator);
                status = true;
            } catch (ExecutionException e) {
                LOGGER.error(SCP_ERROR, e);
            } catch (RchFileManipulationException e) {
                LOGGER.error("invalid file name", e);
            }
        } else {
            LOGGER.error("Error writing response to file.");
        }
        return status;
    }

    /**
     * Pulls health-sub-facility location data from the RCH web service for one
     * state and date range; same write-then-copy flow as getHealthFacilityData.
     *
     * @param from     start date of the requested period
     * @param to       end date of the requested period
     * @param endpoint RCH web-service endpoint URL
     * @param stateId  state whose health-sub-facility data is requested
     * @return true when the response file was written and copied successfully
     */
    @Override
    public boolean getHealthSubFacilityData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID),
                    settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD),
                    from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_LOCATION_HEALTHSUBFACILITY),
                    settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH healthsubfacility Data.", e);
        }
        LOGGER.debug("writing RCH healthsubfacility response to file");
        File responseFile = generateResponseFile(result, RchUserType.HEALTHSUBFACILITY, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH healthsubfacility response successfully written to file. Copying to remote directory.");
            try {
                scpResponseToRemote(responseFile.getName());
                LOGGER.info("RCH healthsubfacility response file successfully copied to remote server");
                RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.HEALTHSUBFACILITY, LocalDate.now());
                rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator);
                status = true;
            } catch (ExecutionException e) {
                LOGGER.error(SCP_ERROR, e);
            } catch (RchFileManipulationException e) {
                LOGGER.error("invalid file name", e);
            }
        } else {
            LOGGER.error("Error writing response to file.");
        }
        return status;
    }

    /**
     * Pulls village-to-health-sub-facility mapping data from the RCH web service
     * for one state and date range; same write-then-copy flow as the other
     * location getters.
     *
     * NOTE(review): log/error strings below say "villagehealthfacility" while the
     * type is VILLAGEHEALTHSUBFACILITY — presumably just an abbreviation; confirm
     * before renaming, as these are runtime strings.
     *
     * @param from     start date of the requested period
     * @param to       end date of the requested period
     * @param endpoint RCH web-service endpoint URL
     * @param stateId  state whose village/health-sub-facility data is requested
     * @return true when the response file was written and copied successfully
     */
    @Override
    public boolean getVillageHealthSubFacilityData(LocalDate from, LocalDate to, URL endpoint, Long stateId) {
        DS_DataResponseDS_DataResult result;
        Irchwebservices dataService = getService(endpoint);
        boolean status = false;
        LOGGER.info(FROM_DATE_LOG, from);
        try {
            result = dataService.DS_Data(settingsFacade.getProperty(Constants.RCH_PROJECT_ID),
                    settingsFacade.getProperty(Constants.RCH_USER_ID),
                    settingsFacade.getProperty(Constants.RCH_PASSWORD),
                    from.toString(DATE_FORMAT), to.toString(DATE_FORMAT), stateId.toString(),
                    settingsFacade.getProperty(Constants.RCH_LOCATION_VILLAGE_HEALTHSUBFACILITY),
                    settingsFacade.getProperty(Constants.RCH_DTID));
        } catch (RemoteException e) {
            throw new RchWebServiceException("Remote Server Error. Could Not Read RCH villagehealthfacility Data.", e);
        }
        LOGGER.debug("writing RCH villagehealthfacility response to file");
        File responseFile = generateResponseFile(result, RchUserType.VILLAGEHEALTHSUBFACILITY, stateId);
        if (responseFile != null) {
            LOGGER.info("RCH villagehealthfacility response successfully written to file. 
Copying to remote directory."); try { scpResponseToRemote(responseFile.getName()); LOGGER.info("RCH villagehealthfacility response file successfully copied to remote server"); RchImportFacilitator rchImportFacilitator = new RchImportFacilitator(responseFile.getName(), from, to, stateId, RchUserType.VILLAGEHEALTHSUBFACILITY, LocalDate.now()); rchImportFacilitatorService.createImportFileAudit(rchImportFacilitator); status = true; } catch (ExecutionException e) { LOGGER.error(SCP_ERROR, e); } catch (RchFileManipulationException e) { LOGGER.error("invalid file name", e); } } else { LOGGER.error("Error writing response to file."); } return status; } @MotechListener(subjects = Constants.RCH_HEALTHFACILITY_READ_SUBJECT) //NO CHECKSTYLE Cyclomatic Complexity @Transactional public void readHealthFacilityResponseFromFile(MotechEvent event) throws RchFileManipulationException { LOGGER.info("Copying RCH healthfacility response file from remote server to local directory."); Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM); try { List<RchImportFacilitator> rchImportFacilitatorsHealthFacilities = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.HEALTHFACILITY); for (RchImportFacilitator rchImportFacilitatorsHealthFacility: rchImportFacilitatorsHealthFacilities ) { ArrayList<Map<String, Object>> healthFacilityArrList = new ArrayList<>(); File localResponseFile = scpResponseToLocal(rchImportFacilitatorsHealthFacility.getFileName()); if (localResponseFile != null) { LOGGER.info("RCH healthfacility response file successfully copied from remote server to local directory."); String result = readResponsesFromXml(localResponseFile); LOGGER.debug("stateId={}", stateId); State state = stateDataService.findByCode(stateId); String stateName = state.getName() != null ? state.getName() : " "; Long stateCode = state.getCode() != null ? 
state.getCode() : 1L;
                    LOGGER.debug("stateCode={}", stateCode);
                    LocalDate startDate = rchImportFacilitatorsHealthFacility.getStartDate();
                    LocalDate endDate = rchImportFacilitatorsHealthFacility.getEndDate();
                    try {
                        if (result.contains(RECORDS)) {
                            // NOTE(review): the (result == null) guard is dead code —
                            // result.contains(RECORDS) above would already have thrown NPE.
                            RchHealthFacilityDataSet healthFacilityDataSet = (result == null) ?
                                    null :
                                    (RchHealthFacilityDataSet) MarshallUtils.unmarshall(result, RchHealthFacilityDataSet.class);
                            LOGGER.info("Starting RCH healthfacility import");
                            StopWatch stopWatch = new StopWatch();
                            stopWatch.start();
                            if (healthFacilityDataSet == null || healthFacilityDataSet.getRecords() == null) {
                                String warning = String.format("No healthfacility data set received from RCH for %s state", stateName);
                                LOGGER.warn(warning);
                                rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.HEALTHFACILITY, stateCode, stateName, 0, 0, warning));
                            } else {
                                // Flatten each record into a location map keyed for the location service.
                                List<RchHealthFacilityRecord> rchHealthFacilityRecords = healthFacilityDataSet.getRecords();
                                for (RchHealthFacilityRecord record : rchHealthFacilityRecords) {
                                    Map<String, Object> locMap = new HashMap<>();
                                    toMapHealthFacility(locMap, record, stateCode);
                                    healthFacilityArrList.add(locMap);
                                }
                            }
                            // Feed records to the location service in chunks of LOCATION_PART_SIZE.
                            int count = 0;
                            int partNumber = 0;
                            Long totalUpdatedRecords = 0L;
                            while (count < healthFacilityArrList.size()) {
                                List<Map<String, Object>> recordListPart = new ArrayList<>();
                                while (recordListPart.size() < LOCATION_PART_SIZE && count < healthFacilityArrList.size()) {
                                    recordListPart.add(healthFacilityArrList.get(count));
                                    count++;
                                }
                                partNumber++;
                                totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.HEALTHFACILITY, rchImportFacilitatorsHealthFacility.getFileName(), partNumber);
                                recordListPart.clear();
                            }
                            LOGGER.debug("File {} processed. {} records updated", rchImportFacilitatorsHealthFacility.getFileName(), totalUpdatedRecords);
                        } else {
                            String warning = String.format("No Healthfacility data set received from RCH for %d stateId", stateId);
                            LOGGER.warn(warning);
                        }
                    } catch (JAXBException e) {
                        // Rethrown as the domain exception and handled by the next catch below.
                        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH healthfacility data from %s location.", stateId), e);
                    } catch (RchInvalidResponseStructureException e) {
                        String error = String.format("Cannot read RCH healthfacility data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId);
                        LOGGER.error(error, e);
                        alertService.create(RCH_WEB_SERVICE, "RCH Web Service healthfacility Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null);
                        rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.HEALTHFACILITY, stateCode, stateName, 0, 0, error));
                        // Fail record lets a later retry pick this state/date up again.
                        rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.HEALTHFACILITY, stateId));
                    } catch (NullPointerException e) {
                        LOGGER.error("No files saved a : ", e);
                    }
                }
            }
        } catch (ExecutionException e) {
            // NOTE(review): string concatenation with the exception loses the stack
            // trace — should be LOGGER.error(msg, e). Left as-is: the "+ e" operand
            // sits on the next source line.
            LOGGER.error("Failed to copy file from remote server to local directory."
null :
                                    (RchHealthSubFacilityDataSet) MarshallUtils.unmarshall(result, RchHealthSubFacilityDataSet.class);
                            LOGGER.info("Starting RCH healthsubfacility import");
                            StopWatch stopWatch = new StopWatch();
                            stopWatch.start();
                            if (healthSubFacilityDataSet == null || healthSubFacilityDataSet.getRecords() == null) {
                                String warning = String.format("No healthsubfacility data set received from RCH for %s state", stateName);
                                LOGGER.warn(warning);
                                rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.HEALTHSUBFACILITY, stateCode, stateName, 0, 0, warning));
                            } else {
                                // Variable keeps its original (misleading) name to stay token-identical;
                                // these are health SUB-facility records.
                                List<RchHealthSubFacilityRecord> rchHealthFacilityRecords = healthSubFacilityDataSet.getRecords();
                                for (RchHealthSubFacilityRecord record : rchHealthFacilityRecords) {
                                    Map<String, Object> locMap = new HashMap<>();
                                    toMapHealthSubFacility(locMap, record, stateCode);
                                    healthSubFacilityArrList.add(locMap);
                                }
                            }
                            // Chunked hand-off to the location service, LOCATION_PART_SIZE at a time.
                            int count = 0;
                            int partNumber = 0;
                            Long totalUpdatedRecords = 0L;
                            while (count < healthSubFacilityArrList.size()) {
                                List<Map<String, Object>> recordListPart = new ArrayList<>();
                                while (recordListPart.size() < LOCATION_PART_SIZE && count < healthSubFacilityArrList.size()) {
                                    recordListPart.add(healthSubFacilityArrList.get(count));
                                    count++;
                                }
                                partNumber++;
                                totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.HEALTHSUBFACILITY, rchImportFacilitatorsHealthSubFacility.getFileName(), partNumber);
                                recordListPart.clear();
                            }
                            LOGGER.debug("File {} processed. {} records updated", rchImportFacilitatorsHealthSubFacility.getFileName(), totalUpdatedRecords);
                        } else {
                            String warning = String.format("No healthsubfacility data set received from RCH for %d stateId", stateId);
                            LOGGER.warn(warning);
                        }
                    } catch (JAXBException e) {
                        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH healthsubfacility data from %s location.", stateId), e);
                    } catch (RchInvalidResponseStructureException e) {
                        String error = String.format("Cannot read RCH healthsubfacility data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId);
                        LOGGER.error(error, e);
                        alertService.create(RCH_WEB_SERVICE, "RCH Web Service healthsubfacility Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null);
                        rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.HEALTHSUBFACILITY, stateCode, stateName, 0, 0, error));
                        rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.HEALTHSUBFACILITY, stateId));
                    } catch (NullPointerException e) {
                        LOGGER.error("No files saved a : ", e);
                    }
                }
            }
        } catch (ExecutionException e) {
            // NOTE(review): concatenating the exception loses its stack trace;
            // prefer LOGGER.error(msg, e). The "+ e" operand is on the next source line.
            LOGGER.error("Failed to copy file from remote server to local directory."
+ e);
        } finally {
            // Chains to the VILLAGE read stage (not village-healthsubfacility) —
            // presumably the intended pipeline order; confirm against the scheduler.
            Map<String, Object> eventParams = new HashMap<>();
            eventParams.put(Constants.STATE_ID_PARAM, stateId);
            eventRelay.sendEventMessage(new MotechEvent(Constants.RCH_VILLAGE_READ_SUBJECT, eventParams));
        }
    }

    /**
     * Event listener: copies the day's village-to-health-sub-facility response
     * file(s) for the given state from the remote server, unmarshalls them and
     * imports the mapping records in parts via the location service.
     */
    @MotechListener(subjects = Constants.RCH_VILLAGE_HEALTHSUBFACILITY_READ_SUBJECT)
    //NO CHECKSTYLE Cyclomatic Complexity
    @Transactional
    public void readVillageHealthSubFacilityResponseFromFile(MotechEvent event) throws RchFileManipulationException {
        Long stateId = (Long) event.getParameters().get(Constants.STATE_ID_PARAM);
        LOGGER.info("Copying RCH villageHealthsubfacility response file from remote server to local directory.");
        try {
            List<RchImportFacilitator> rchImportFacilitatorsVillageHealthSubFacilities = rchImportFacilitatorService.findByImportDateStateIdAndRchUserType(stateId, LocalDate.now(), RchUserType.VILLAGEHEALTHSUBFACILITY);
            for (RchImportFacilitator rchImportFacilitatorsVillageHealthSubFacility : rchImportFacilitatorsVillageHealthSubFacilities) {
                ArrayList<Map<String, Object>> villageHealthSubFacilityArrList = new ArrayList<>();
                File localResponseFile = scpResponseToLocal(rchImportFacilitatorsVillageHealthSubFacility.getFileName());
                if (localResponseFile != null) {
                    LOGGER.info("RCH villageHealthsubfacility response file successfully copied from remote server to local directory.");
                    String result = readResponsesFromXml(localResponseFile);
                    LOGGER.debug("stateId={}", stateId);
                    State state = stateDataService.findByCode(stateId);
                    // Fallbacks: blank name / code 1 when the state row is incomplete.
                    String stateName = state.getName() != null ? state.getName() : " ";
                    Long stateCode = state.getCode() != null ? state.getCode() : 1L;
                    LOGGER.debug("stateCode={}", stateCode);
                    LocalDate startDate = rchImportFacilitatorsVillageHealthSubFacility.getStartDate();
                    LocalDate endDate = rchImportFacilitatorsVillageHealthSubFacility.getEndDate();
                    try {
                        if (result.contains(RECORDS)) {
                            // NOTE(review): the (result == null) guard below is dead —
                            // result.contains(RECORDS) above would already have thrown NPE.
                            RchVillageHealthSubFacilityDataSet villageHealthSubFacilityDataSet = (result == null) ?
                                    null :
                                    (RchVillageHealthSubFacilityDataSet) MarshallUtils.unmarshall(result, RchVillageHealthSubFacilityDataSet.class);
                            LOGGER.info("Starting RCH villageHealthsubfacility import");
                            StopWatch stopWatch = new StopWatch();
                            stopWatch.start();
                            if (villageHealthSubFacilityDataSet == null || villageHealthSubFacilityDataSet.getRecords() == null) {
                                String warning = String.format("No villageHealthsubfacility data set received from RCH for %s state", stateName);
                                LOGGER.warn(warning);
                                rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.VILLAGEHEALTHSUBFACILITY, stateCode, stateName, 0, 0, warning));
                            } else {
                                List<RchVillageHealthSubFacilityRecord> rchVillageHealthFacilityRecords = villageHealthSubFacilityDataSet.getRecords();
                                for (RchVillageHealthSubFacilityRecord record : rchVillageHealthFacilityRecords) {
                                    Map<String, Object> locMap = new HashMap<>();
                                    toMapVillageHealthSubFacility(locMap, record, stateCode);
                                    villageHealthSubFacilityArrList.add(locMap);
                                }
                            }
                            // Chunked hand-off to the location service, LOCATION_PART_SIZE at a time.
                            int count = 0;
                            int partNumber = 0;
                            Long totalUpdatedRecords = 0L;
                            while (count < villageHealthSubFacilityArrList.size()) {
                                List<Map<String, Object>> recordListPart = new ArrayList<>();
                                while (recordListPart.size() < LOCATION_PART_SIZE && count < villageHealthSubFacilityArrList.size()) {
                                    recordListPart.add(villageHealthSubFacilityArrList.get(count));
                                    count++;
                                }
                                partNumber++;
                                totalUpdatedRecords += locationService.createLocationPart(recordListPart, LocationEnum.VILLAGEHEALTHSUBFACILITY, rchImportFacilitatorsVillageHealthSubFacility.getFileName(), partNumber);
                                recordListPart.clear();
                            }
                            LOGGER.debug("File {} processed. {} records updated", rchImportFacilitatorsVillageHealthSubFacility.getFileName(), totalUpdatedRecords);
                        } else {
                            String warning = String.format("No villageHealthsubfacility data set received from RCH for %d stateId", stateId);
                            LOGGER.warn(warning);
                        }
                    } catch (JAXBException e) {
                        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH villageHealthsubfacility data from %s location.", stateId), e);
                    } catch (RchInvalidResponseStructureException e) {
                        String error = String.format("Cannot read RCH villageHealthsubfacility data from %s state with stateId: %d. Response Deserialization Error", stateName, stateId);
                        LOGGER.error(error, e);
                        alertService.create(RCH_WEB_SERVICE, "RCH Web Service villageHealthsubfacility Import", e.getMessage() + " " + error, AlertType.CRITICAL, AlertStatus.NEW, 0, null);
                        rchImportAuditDataService.create(new RchImportAudit(startDate, endDate, RchUserType.VILLAGEHEALTHSUBFACILITY, stateCode, stateName, 0, 0, error));
                        rchImportFailRecordDataService.create(new RchImportFailRecord(endDate, RchUserType.VILLAGEHEALTHSUBFACILITY, stateId));
                    } catch (NullPointerException e) {
                        LOGGER.error("No files saved a : ", e);
                    }
                }
            }
        } catch (ExecutionException e) {
            // NOTE(review): concatenating the exception loses its stack trace;
            // prefer LOGGER.error(msg, e). The "+ e" operand is on the next source line.
            LOGGER.error("Failed to copy file from remote server to local directory."
+ e);
        }
    }

    /**
     * Returns the RCH SOAP port, bound to {@code endpoint} when given,
     * otherwise to the locator's default address.
     *
     * @throws RchWebServiceException (unchecked) if the binding cannot be created
     */
    private Irchwebservices getService(URL endpoint) {
        try {
            if (endpoint != null) {
                return rchServiceLocator.getBasicHttpBinding_Irchwebservices(endpoint);
            } else {
                return rchServiceLocator.getBasicHttpBinding_Irchwebservices();
            }
        } catch (ServiceException e) {
            throw new RchWebServiceException("Cannot retrieve RCH Service for the endpoint", e);
        }
    }

    // The valid*DataResponse methods below all apply the same structural check to the
    // raw Axis payload: exactly two MessageElements, and if the second element has a
    // children list it must be non-empty.

    /** Validates the raw mothers DS_Data payload structure for the given state. */
    private void validMothersDataResponse(DS_DataResponseDS_DataResult data, Long stateId) {
        if (data.get_any().length != 2) {
            throw new RchInvalidResponseStructureException("Invalid mothers data response for location " + stateId);
        }
        if (data.get_any()[1].getChildren() != null && data.get_any()[1].getChildren().size() < 1) {
            throw new RchInvalidResponseStructureException("Invalid mothers data response " + stateId);
        }
    }

    /** Validates the raw taluka DS_Data payload structure for the given state. */
    private void validTalukasDataResponse(DS_DataResponseDS_DataResult data, Long stateId) {
        if (data.get_any().length != 2) {
            throw new RchInvalidResponseStructureException("Invalid taluka data response for location " + stateId);
        }
        if (data.get_any()[1].getChildren() != null && data.get_any()[1].getChildren().size() < 1) {
            throw new RchInvalidResponseStructureException("Invalid taluka data response " + stateId);
        }
    }

    /** Validates the raw healthblock DS_Data payload structure for the given state. */
    private void validHealthBlockDataResponse(DS_DataResponseDS_DataResult data, Long stateId) {
        if (data.get_any().length != 2) {
            throw new RchInvalidResponseStructureException("Invalid healthblock data response for location " + stateId);
        }
        if (data.get_any()[1].getChildren() != null && data.get_any()[1].getChildren().size() < 1) {
            throw new RchInvalidResponseStructureException("Invalid healthblock data response " + stateId);
        }
    }

    /** Validates the raw taluka-healthblock DS_Data payload structure for the given state. */
    private void validTalukaHealthBlockDataResponse(DS_DataResponseDS_DataResult data, Long stateId) {
        if (data.get_any().length != 2) {
            throw new RchInvalidResponseStructureException("Invalid taluka-healthblock data response for location " + stateId);
        }
        if (data.get_any()[1].getChildren() != null && data.get_any()[1].getChildren().size() < 1) {
            throw new RchInvalidResponseStructureException("Invalid taluka-healthblock data response " + stateId);
        }
    }

    /** Validates the raw children DS_Data payload structure for the given state. */
    private void validChildrenDataResponse(DS_DataResponseDS_DataResult data, Long stateId) {
        if (data.get_any().length != 2) {
            throw new RchInvalidResponseStructureException("Invalid children data response for location " + stateId);
        }
        if (data.get_any()[1].getChildren() != null && data.get_any()[1].getChildren().size() < 1) {
            throw new RchInvalidResponseStructureException("Invalid children data response " + stateId);
        }
    }

    /** Validates the raw ANM/ASHA DS_Data payload structure for the given state. */
    private void validAnmAshaDataResponse(DS_DataResponseDS_DataResult data, Long stateId) {
        if (data.get_any().length != 2) {
            throw new RchInvalidResponseStructureException("Invalid anm asha data response for location " + stateId);
        }
        if (data.get_any()[1].getChildren() != null && data.get_any()[1].getChildren().size() < 1) {
            throw new RchInvalidResponseStructureException("Invalid anm asha data response " + stateId);
        }
    }

    /**
     * Builds the response-file name for a given user type, state and timestamp,
     * e.g. "RCH_StateID_9_Mother_Response_&lt;ts&gt;.xml". Returns the literal
     * string "Null" for unknown user types.
     */
    private String targetFileName(String timeStamp, RchUserType userType, Long stateId) {
        switch (userType) {
            case MOTHER:
                return String.format("RCH_StateID_%d_Mother_Response_%s.xml", stateId, timeStamp);
            case CHILD:
                return String.format("RCH_StateID_%d_Child_Response_%s.xml", stateId, timeStamp);
            case ASHA:
                return String.format("RCH_StateID_%d_Asha_Response_%s.xml", stateId, timeStamp);
            case TALUKA:
                return String.format("RCH_StateID_%d_Taluka_Response_%s.xml", stateId, timeStamp);
            case HEALTHBLOCK:
                return String.format("RCH_StateID_%d_HealthBlock_Response_%s.xml", stateId, timeStamp);
            case TALUKAHEALTHBLOCK:
                return String.format("RCH_StateID_%d_Taluka_HealthBlock_Response_%s.xml", stateId, timeStamp);
            case DISTRICT:
                return String.format("RCH_StateID_%d_District_Response_%s.xml", stateId, timeStamp);
            case VILLAGE:
                return String.format("RCH_StateID_%d_Village_Response_%s.xml", stateId, timeStamp);
            case HEALTHFACILITY:
                return String.format("RCH_StateID_%d_HealthFacility_Response_%s.xml", stateId, timeStamp);
            case HEALTHSUBFACILITY:
                return
String.format("RCH_StateID_%d_HealthSubFacility_Response_%s.xml", stateId, timeStamp);
            case VILLAGEHEALTHSUBFACILITY:
                return String.format("RCH_StateID_%d_Village_HealthSubFacility_Response_%s.xml", stateId, timeStamp);
            default:
                return "Null";
        }
    }

    /**
     * Serializes the raw Axis result to a timestamped XML file in the local
     * response directory.
     *
     * @return the written file, or null on any serialization/IO failure
     */
    private File generateResponseFile(DS_DataResponseDS_DataResult result, RchUserType userType, Long stateId) {
        String targetFileName = targetFileName(TIME_FORMATTER.print(DateTime.now()), userType, stateId);
        File localResponseDir = localResponseDir();
        File localResponseFile = new File(localResponseDir, targetFileName);
        try {
            // NOTE(review): FileWriter is not closed on failure paths —
            // try-with-resources would be safer. Left as-is (doc-only pass).
            FileWriter writer = new FileWriter(localResponseFile);
            writer.write(serializeAxisObject(result));
            writer.flush();
            writer.close();
        } catch (Exception e) {
            LOGGER.debug("Failed deserialization", e);
            LOGGER.error((e.toString()));
            return null;
        }
        return localResponseFile;
    }

    /**
     * Imports a mothers data set for one state: pre-validates LMP dates,
     * de-duplicates by MSISDN, applies HPD filtering, resolves locations, then
     * processes the remainder on a thread pool and persists rejections.
     *
     * @return audit row with saved/rejected counts for this state and window
     */
    private RchImportAudit saveImportedMothersData(RchMothersDataSet mothersDataSet, String stateName, Long stateCode, LocalDate startReferenceDate, LocalDate endReferenceDate) { //NOPMD NcssMethodCount
        LOGGER.info("Starting RCH mother import for state {}", stateName);
        List<RchMotherRecord> motherRecords = mothersDataSet.getRecords();
        List<Map<String, Object>> validMotherRecords = new ArrayList<>();
        validMotherRecords = getLMPValidRecords(motherRecords);
        // get(0) = records rejected as duplicate-MSISDN, get(1) = accepted records.
        List<List<Map<String, Object>>> rchMotherRecordsSet = cleanRchMotherRecords(validMotherRecords);
        List<Map<String, Object>> rejectedRchMothers = rchMotherRecordsSet.get(0);
        String action = "";
        int saved = 0;
        // Records dropped by the LMP pre-validation already count as rejected.
        int rejected = motherRecords.size() - validMotherRecords.size();
        Map<String, Object> rejectedMothers = new HashMap<>();
        Map<String, Object> rejectionStatus = new HashMap<>();
        MotherImportRejection motherImportRejection;
        for (Map<String, Object> record : rejectedRchMothers) {
            action = (String) record.get(KilkariConstants.ACTION);
            LOGGER.debug("Existing Mother Record with same MSISDN in the data set");
            motherImportRejection = motherRejectionRch(convertMapToRchMother(record), false, RejectionReasons.DUPLICATE_MOBILE_NUMBER_IN_DATASET.toString(), action);
            rejectedMothers.put(motherImportRejection.getRegistrationNo(), motherImportRejection);
            rejectionStatus.put(motherImportRejection.getRegistrationNo(), motherImportRejection.getAccepted());
            rejected++;
        }
        List<Map<String, Object>> acceptedRchMothers = rchMotherRecordsSet.get(1);
        // HPD filter: only keep records whose state/district pair is permitted.
        Map<Long, Set<Long>> hpdMap = getHpdFilters();
        List<Map<String, Object>> recordList = new ArrayList<>();
        for (Map<String, Object> recordMap : acceptedRchMothers) {
            boolean hpdValidation = validateHpdUser(hpdMap, (long) recordMap.get(KilkariConstants.STATE_ID), (long) recordMap.get(KilkariConstants.DISTRICT_ID));
            if (hpdValidation) {
                recordList.add(recordMap);
            }
        }
        LocationFinder locationFinder = locationService.updateLocations(recordList);
        // Sort then split by mobile number so one worker owns all records for a MSISDN.
        recordList = mctsBeneficiaryImportReaderService.sortByMobileNumber(recordList, false);
        Timer timer = new Timer("mom", "moms");
        List<List<Map<String, Object>>> recordListArray = mctsBeneficiaryImportReaderService.splitRecords(recordList, KilkariConstants.MOBILE_NO);
        LOGGER.debug("Thread Processing Start");
        Integer recordsProcessed = 0;
        ExecutorService executor = Executors.newCachedThreadPool();
        List<Future<ThreadProcessorObject>> list = new ArrayList<>();
        for (int i = 0; i < recordListArray.size(); i++) {
            Callable<ThreadProcessorObject> callable = new MotherCsvThreadProcessor(recordListArray.get(i), false, SubscriptionOrigin.RCH_IMPORT, locationFinder, mctsBeneficiaryValueProcessor, mctsBeneficiaryImportService);
            Future<ThreadProcessorObject> future = executor.submit(callable);
            list.add(future);
        }
        // Join every worker and fold its per-future counts into the totals.
        for (Future<ThreadProcessorObject> fut : list) {
            try {
                ThreadProcessorObject threadProcessorObject = fut.get();
                Map<String, Object> rejectedBen = threadProcessorObject.getRejectedBeneficiaries();
                rejectedMothers.putAll(rejectedBen);
                int currentRej = rejectedBen.size();
                rejected += currentRej;
                Integer currentRecordsPro = threadProcessorObject.getRecordsProcessed();
                saved += currentRecordsPro - currentRej;
                rejectionStatus.putAll(threadProcessorObject.getRejectionStatus());
                recordsProcessed += currentRecordsPro;
            } catch (InterruptedException | java.util.concurrent.ExecutionException e) {
                LOGGER.error("Error while running thread", e);
            }
        }
        executor.shutdown();
        try {
            executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        } catch (InterruptedException e) {
            LOGGER.error("Error while Terminating thread", e);
        }
        LOGGER.debug("Thread Processing End");
        LOGGER.debug(KilkariConstants.IMPORTED, timer.frequency(recordsProcessed));
        try {
            mctsBeneficiaryImportService.createOrUpdateRchMotherRejections(rejectedMothers, rejectionStatus);
        } catch (RuntimeException e) {
            LOGGER.error(BULK_REJECTION_ERROR_MESSAGE, e);
        }
        LOGGER.info("RCH import: {} state, Total: {} mothers imported, {} mothers rejected", stateName, saved, rejected);
        return new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.MOTHER, stateCode, stateName, saved, rejected, null);
    }

    /**
     * Pre-validation pass for mother records: resolves/creates the MctsMother,
     * checks the LMP reference date, records rejections, and returns the records
     * that may proceed to import (each tagged with CREATE/UPDATE action).
     */
    private List<Map<String, Object>> getLMPValidRecords(List<RchMotherRecord> motherRecords) {
        List<Map<String, Object>> validMotherRecords = new ArrayList<>();
        Map<String, Object> rejectedMothers = new HashMap<>();
        Map<String, Object> rejectionStatus = new HashMap<>();
        MotherImportRejection motherImportRejection;
        for (RchMotherRecord record : motherRecords) {
            Map<String, Object> recordMap = toMap(record);
            MctsMother mother;
            Long msisdn;
            String beneficiaryId;
            String action = KilkariConstants.CREATE;
            beneficiaryId = (String) recordMap.get(KilkariConstants.RCH_ID);
            String mctsId = (String) recordMap.get(KilkariConstants.MCTS_ID);
            msisdn = (Long) recordMap.get(KilkariConstants.MOBILE_NO);
            DateTime lmp = (DateTime) recordMap.get(KilkariConstants.LMP);
            mother = mctsBeneficiaryValueProcessor.getOrCreateRchMotherInstance(beneficiaryId, mctsId);
            recordMap.put(KilkariConstants.RCH_MOTHER, mother);
            if (mother == null) {
                motherImportRejection = motherRejectionRch(convertMapToRchMother(recordMap), false,
RejectionReasons.DATA_INTEGRITY_ERROR.toString(), action); rejectedMothers.put(motherImportRejection.getRegistrationNo(), motherImportRejection); rejectionStatus.put(motherImportRejection.getRegistrationNo(), motherImportRejection.getAccepted()); } else { if ((mother.getId() == null || (mother.getId() != null && mother.getLastMenstrualPeriod() == null)) && !mctsBeneficiaryImportService.validateReferenceDate(lmp, SubscriptionPackType.PREGNANCY, msisdn, beneficiaryId, SubscriptionOrigin.MCTS_IMPORT)) { motherImportRejection = motherRejectionRch(convertMapToRchMother(recordMap), false, RejectionReasons.INVALID_LMP_DATE.toString(), action); rejectedMothers.put(motherImportRejection.getRegistrationNo(), motherImportRejection); rejectionStatus.put(motherImportRejection.getRegistrationNo(), motherImportRejection.getAccepted()); } else { action = mother.getId() == null ? KilkariConstants.CREATE : KilkariConstants.UPDATE; recordMap.put(KilkariConstants.ACTION, action); validMotherRecords.add(recordMap); } } } try { mctsBeneficiaryImportService.createOrUpdateRchMotherRejections(rejectedMothers , rejectionStatus); } catch (RuntimeException e) { LOGGER.error(BULK_REJECTION_ERROR_MESSAGE, e); } return validMotherRecords; } private RchImportAudit saveImportedChildrenData(RchChildrenDataSet childrenDataSet, String stateName, Long stateCode, LocalDate startReferenceDate, LocalDate endReferenceDate) { //NOPMD NcssMethodCount LOGGER.info("Starting RCH children import for state {}", stateName); List<RchChildRecord> childRecords = childrenDataSet.getRecords(); List<Map<String, Object>> validChildRecords = new ArrayList<>(); validChildRecords = getDOBValidChildRecords(childRecords); List<List<Map<String, Object>>> rchChildRecordsSet = cleanRchChildRecords(validChildRecords); List<Map<String, Object>> rejectedRchChildren = rchChildRecordsSet.get(0); String action = ""; int saved = 0; int rejected = childRecords.size() - validChildRecords.size(); Map<String, Object> rejectedChilds = new 
HashMap<>(); Map<String, Object> rejectionStatus = new HashMap<>(); ChildImportRejection childImportRejection; for (Map<String, Object> record : rejectedRchChildren) { action = (String) record.get(KilkariConstants.ACTION); LOGGER.debug("Existing Child Record with same MSISDN in the data set"); childImportRejection = childRejectionRch(convertMapToRchChild(record), false, RejectionReasons.DUPLICATE_MOBILE_NUMBER_IN_DATASET.toString(), action); rejectedChilds.put(childImportRejection.getRegistrationNo(), childImportRejection); rejectionStatus.put(childImportRejection.getRegistrationNo(), childImportRejection.getAccepted()); rejected++; } List<Map<String, Object>> acceptedRchChildren = rchChildRecordsSet.get(1); Map<Long, Set<Long>> hpdMap = getHpdFilters(); List<Map<String, Object>> recordList = new ArrayList<>(); for (Map<String, Object> recordMap : acceptedRchChildren) { boolean hpdValidation = validateHpdUser(hpdMap, (long) recordMap.get(KilkariConstants.STATE_ID), (long) recordMap.get(KilkariConstants.DISTRICT_ID)); if (hpdValidation) { recordList.add(recordMap); } } LocationFinder locationFinder = locationService.updateLocations(recordList); recordList = mctsBeneficiaryImportReaderService.sortByMobileNumber(recordList, false); Timer timer = new Timer("kid", "kids"); List<List<Map<String, Object>>> recordListArray = mctsBeneficiaryImportReaderService.splitRecords(recordList, KilkariConstants.MOBILE_NO); LOGGER.debug("Thread Processing Start"); Integer recordsProcessed = 0; ExecutorService executor = Executors.newCachedThreadPool(); List<Future<ThreadProcessorObject>> list = new ArrayList<>(); for (int i = 0; i < recordListArray.size(); i++) { Callable<ThreadProcessorObject> callable = new ChildCsvThreadProcessor(recordListArray.get(i), false, SubscriptionOrigin.RCH_IMPORT, locationFinder, mctsBeneficiaryValueProcessor, mctsBeneficiaryImportService); Future<ThreadProcessorObject> future = executor.submit(callable); list.add(future); } for 
(Future<ThreadProcessorObject> fut : list) { try { ThreadProcessorObject threadProcessorObject = fut.get(); Map<String,Object> currRejBen = threadProcessorObject.getRejectedBeneficiaries(); Integer currRejBenSize = currRejBen.size(); rejectedChilds.putAll(currRejBen); rejectionStatus.putAll(threadProcessorObject.getRejectionStatus()); Integer currentRecordsProcessed = threadProcessorObject.getRecordsProcessed(); recordsProcessed += currentRecordsProcessed; rejected += currRejBenSize; saved += recordsProcessed - currRejBenSize; } catch (InterruptedException | java.util.concurrent.ExecutionException e) { LOGGER.error("Error while running thread", e); } } executor.shutdown(); try { executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (InterruptedException e) { LOGGER.error("Error while Terminating thread", e); } LOGGER.debug("Thread Processing End"); LOGGER.debug(KilkariConstants.IMPORTED, timer.frequency(recordsProcessed)); try { mctsBeneficiaryImportService.createOrUpdateRchChildRejections(rejectedChilds , rejectionStatus); } catch (RuntimeException e) { LOGGER.error(BULK_REJECTION_ERROR_MESSAGE, e); } LOGGER.info("RCH import: {} state, Total: {} children imported, {} children rejected", stateName, saved, rejected); return new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.CHILD, stateCode, stateName, saved, rejected, null); } private List<Map<String, Object>> getDOBValidChildRecords(List<RchChildRecord> childRecords) { List<Map<String, Object>> validChildRecords = new ArrayList<>(); Map<String, Object> rejectedChilds = new HashMap<>(); Map<String, Object> rejectionStatus = new HashMap<>(); ChildImportRejection childImportRejection; for (RchChildRecord record : childRecords) { Map<String, Object> recordMap = toMap(record); MctsChild child; Long msisdn; String childId; String action = KilkariConstants.CREATE; childId = (String) recordMap.get(KilkariConstants.RCH_ID); String mctsId = (String) 
recordMap.get(KilkariConstants.MCTS_ID);
            msisdn = (Long) recordMap.get(KilkariConstants.MOBILE_NO);
            DateTime dob = (DateTime) recordMap.get(KilkariConstants.DOB);
            // add child to the record
            child = mctsBeneficiaryValueProcessor.getOrCreateRchChildInstance(childId, mctsId);
            recordMap.put(KilkariConstants.RCH_CHILD, child);
            if (child == null) {
                childImportRejection = childRejectionRch(convertMapToRchChild(recordMap), false, RejectionReasons.DATA_INTEGRITY_ERROR.toString(), action);
                rejectedChilds.put(childImportRejection.getRegistrationNo(), childImportRejection);
                rejectionStatus.put(childImportRejection.getRegistrationNo(), childImportRejection.getAccepted());
            } else {
                // DOB is only validated for brand-new children (no persisted id yet).
                if (child.getId() == null && !mctsBeneficiaryImportService.validateReferenceDate(dob, SubscriptionPackType.CHILD, msisdn, childId, SubscriptionOrigin.RCH_IMPORT)) {
                    childImportRejection = childRejectionRch(convertMapToRchChild(recordMap), false, RejectionReasons.INVALID_DOB.toString(), action);
                    rejectedChilds.put(childImportRejection.getRegistrationNo(), childImportRejection);
                    rejectionStatus.put(childImportRejection.getRegistrationNo(), childImportRejection.getAccepted());
                } else {
                    action = (child.getId() == null) ? KilkariConstants.CREATE : KilkariConstants.UPDATE;
                    recordMap.put(KilkariConstants.ACTION, action);
                    validChildRecords.add(recordMap);
                }
            }
        }
        try {
            mctsBeneficiaryImportService.createOrUpdateRchChildRejections(rejectedChilds, rejectionStatus);
        } catch (RuntimeException e) {
            LOGGER.error(BULK_REJECTION_ERROR_MESSAGE, e);
        }
        return validChildRecords;
    }

    /**
     * Imports an ANM/ASHA (FLW) data set for one state: de-duplicates by MSISDN,
     * then per record validates MSISDN ownership and ASHA designation before
     * delegating to the front-line-worker import service.
     * (Method body continues beyond this chunk.)
     */
    private RchImportAudit saveImportedAshaData(RchAnmAshaDataSet anmAshaDataSet, String stateName, Long stateCode, LocalDate startReferenceDate, LocalDate endReferenceDate) { //NOPMD NcssMethodCount // NO CHECKSTYLE Cyclomatic Complexity
        LOGGER.info("Starting RCH ASHA import for state {}", stateName);
        // get(0) = duplicate-MSISDN rejects, get(1) = accepted records.
        List<List<RchAnmAshaRecord>> rchAshaRecordsSet = cleanRchFlwRecords(anmAshaDataSet.getRecords());
        List<RchAnmAshaRecord> rejectedRchAshas = rchAshaRecordsSet.get(0);
        String action = "";
        for (RchAnmAshaRecord record : rejectedRchAshas) {
            action = this.rchFlwActionFinder(record);
            LOGGER.debug("Existing Asha Record with same MSISDN in the data set");
            flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.DUPLICATE_MOBILE_NUMBER_IN_DATASET.toString(), action));
        }
        List<RchAnmAshaRecord> acceptedRchAshas = rchAshaRecordsSet.get(1);
        int saved = 0;
        int rejected = 0;
        State state = stateDataService.findByCode(stateCode);
        for (RchAnmAshaRecord record : acceptedRchAshas) {
            try {
                action = this.rchFlwActionFinder(record);
                String designation = record.getGfType();
                designation = (designation != null ?
designation.trim() : designation);
Long msisdn = Long.parseLong(record.getMobileNo());
String flwId = record.getGfId().toString();
FrontLineWorker flw = frontLineWorkerService.getByContactNumber(msisdn);
// Reject when this MSISDN already belongs to a different, non-anonymous FLW
// (different MCTS id or different state).
if ((flw != null && (!flwId.equals(flw.getMctsFlwId()) || state != flw.getState())) && flw.getStatus() != FrontLineWorkerStatus.ANONYMOUS) {
    LOGGER.debug("Existing FLW with same MSISDN but different MCTS ID");
    flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.MOBILE_NUMBER_ALREADY_IN_USE.toString(), action));
    rejected++;
} else {
    // Only ASHA-type workers are imported; other designations are rejected.
    if (!(FlwConstants.ASHA_TYPE.equalsIgnoreCase(designation))) {
        flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.FLW_TYPE_NOT_ASHA.toString(), action));
        rejected++;
    } else {
        try {
            // get user property map
            Map<String, Object> recordMap = record.toFlwRecordMap(); // temp var used for debugging
            frontLineWorkerImportService.importRchFrontLineWorker(recordMap, state);
            // Record a "rejection" row with accepted=true to mark success.
            flwRejectionService.createUpdate(flwRejectionRch(record, true, null, action));
            saved++;
        } catch (InvalidLocationException e) {
            LOGGER.warn("Invalid location for FLW: ", e);
            flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.INVALID_LOCATION.toString(), action));
            rejected++;
        } catch (FlwImportException e) {
            LOGGER.debug("Existing FLW with same MSISDN but different RCH ID", e);
            flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.MOBILE_NUMBER_ALREADY_IN_USE.toString(), action));
            rejected++;
        } catch (FlwExistingRecordException e) {
            LOGGER.error("Cannot import FLW with ID: {}, and MSISDN (Mobile_No): {}", record.getGfId(), record.getMobileNo(), e);
            flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.UPDATED_RECORD_ALREADY_EXISTS.toString(), action));
            rejected++;
        } catch (Exception e) {
            // Catch-all so one bad record does not abort the whole state import.
            LOGGER.error("RCH Flw import Error. Cannot import FLW with ID: {}, and MSISDN (Mobile_No): {}", record.getGfId(), record.getMobileNo(), e);
            flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.FLW_IMPORT_ERROR.toString(), action));
            rejected++;
        }
    }
    // Progress log every THOUSAND records.
    if ((saved + rejected) % THOUSAND == 0) {
        LOGGER.debug("RCH import: {} state, Progress: {} Ashas imported, {} Ashas rejected", stateName, saved, rejected);
    }
}
} catch (NumberFormatException e) {
    // Missing/malformed mobile number — reject the record, keep importing.
    LOGGER.error("Mobile number either not present or is not in number format");
    flwRejectionService.createUpdate(flwRejectionRch(record, false, RejectionReasons.MOBILE_NUMBER_EMPTY_OR_WRONG_FORMAT.toString(), action));
}
}
LOGGER.info("RCH import: {} state, Total: {} Ashas imported, {} Ashas rejected", stateName, saved, rejected);
return new RchImportAudit(startReferenceDate, endReferenceDate, RchUserType.ASHA, stateCode, stateName, saved, rejected, null);
}

/**
 * Converts an RCH mother record to the generic field map used by the import
 * services; location fields are delegated to toMapLocMother.
 * (Method body continues past this wrap boundary.)
 */
private Map<String, Object> toMap(RchMotherRecord motherRecord) {
    Map<String, Object> map = new HashMap<>();
    toMapLocMother(map, motherRecord);
    map.put(KilkariConstants.MCTS_ID, motherRecord.getMctsIdNo());
    map.put(KilkariConstants.RCH_ID, motherRecord.getRegistrationNo());
    map.put(KilkariConstants.BENEFICIARY_NAME, motherRecord.getName());
    map.put(KilkariConstants.MOBILE_NO, mctsBeneficiaryValueProcessor.getMsisdnByString(motherRecord.getMobileNo()));
    map.put(KilkariConstants.LMP, mctsBeneficiaryValueProcessor.getDateByString(motherRecord.getLmpDate()));
    map.put(KilkariConstants.MOTHER_DOB, mctsBeneficiaryValueProcessor.getDateByString(motherRecord.getBirthDate()));
    map.put(KilkariConstants.ABORTION_TYPE, mctsBeneficiaryValueProcessor.getAbortionDataFromString(motherRecord.getAbortionType()));
    map.put(KilkariConstants.DELIVERY_OUTCOMES, mctsBeneficiaryValueProcessor.getStillBirthFromString(String.valueOf(motherRecord.getDeliveryOutcomes())));
    map.put(KilkariConstants.DEATH, mctsBeneficiaryValueProcessor.getDeathFromString(String.valueOf(motherRecord.getEntryType())));
    // Empty exec-date string is treated as "no execution date".
    map.put(KilkariConstants.EXECUTION_DATE, "".equals(motherRecord.getExecDate()) ? null : mctsBeneficiaryValueProcessor.getLocalDateByString(motherRecord.getExecDate()));
    map.put(KilkariConstants.CASE_NO, mctsBeneficiaryValueProcessor.getCaseNoByString(motherRecord.getCaseNo().toString()));
    return map;
}

/** Copies the location hierarchy (state → village) of a mother record into the map. */
private void toMapLocMother(Map<String, Object> map, RchMotherRecord motherRecord) {
    map.put(KilkariConstants.STATE_ID, motherRecord.getStateId());
    map.put(KilkariConstants.DISTRICT_ID, motherRecord.getDistrictId());
    map.put(KilkariConstants.DISTRICT_NAME, motherRecord.getDistrictName());
    map.put(KilkariConstants.TALUKA_ID, motherRecord.getTalukaId());
    map.put(KilkariConstants.TALUKA_NAME, motherRecord.getTalukaName());
    map.put(KilkariConstants.HEALTH_BLOCK_ID, motherRecord.getHealthBlockId());
    map.put(KilkariConstants.HEALTH_BLOCK_NAME, motherRecord.getHealthBlockName());
    map.put(KilkariConstants.PHC_ID, motherRecord.getPhcId());
    map.put(KilkariConstants.PHC_NAME, motherRecord.getPhcName());
    map.put(KilkariConstants.SUB_CENTRE_ID, motherRecord.getSubCentreId());
    map.put(KilkariConstants.SUB_CENTRE_NAME, motherRecord.getSubCentreName());
    map.put(KilkariConstants.CENSUS_VILLAGE_ID, motherRecord.getVillageId());
    map.put(KilkariConstants.VILLAGE_NAME, motherRecord.getVillageName());
}

/** Maps an RCH district CSV record (keyed by CSV_* constants) for location import. */
private void toMapDistrict(Map<String, Object> map, RchDistrictRecord districtRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, districtRecord.getDistrictCode());
    map.put(KilkariConstants.DISTRICT_NAME, districtRecord.getDistrictName());
    map.put(KilkariConstants.EXEC_DATE, districtRecord.getExecDate());
}

/** Maps an RCH taluka record for location import. */
private void toMapTaluka(Map<String, Object> map, RchTalukaRecord talukaRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, talukaRecord.getDistrictCode());
    map.put(KilkariConstants.TALUKA_ID, talukaRecord.getTalukaCode());
    map.put(KilkariConstants.TALUKA_NAME, talukaRecord.getTalukaName());
    map.put(KilkariConstants.EXEC_DATE, talukaRecord.getExecDate());
}

/** Maps an RCH village record for location import. */
private void toMapVillage(Map<String, Object> map, RchVillageRecord villageRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, villageRecord.getDistrictCode());
    map.put(KilkariConstants.TALUKA_ID, villageRecord.getTalukaCode());
    map.put(KilkariConstants.CENSUS_VILLAGE_ID, villageRecord.getVillageCode());
    map.put(KilkariConstants.VILLAGE_NAME, villageRecord.getVillageName());
    map.put(KilkariConstants.EXEC_DATE, villageRecord.getExecDate());
}

/** Maps an RCH health-block record for location import. */
private void toMapHealthBlock(Map<String, Object> map, RchHealthBlockRecord healthBlockRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, healthBlockRecord.getDistrictCode());
    map.put(KilkariConstants.TALUKA_ID, healthBlockRecord.getTalukaCode());
    map.put(KilkariConstants.HEALTH_BLOCK_ID, healthBlockRecord.getHealthBlockCode());
    map.put(KilkariConstants.HEALTH_BLOCK_NAME, healthBlockRecord.getHealthBlockName());
    map.put(KilkariConstants.EXEC_DATE, healthBlockRecord.getExecDate());
}

/** Maps an RCH taluka↔health-block link record for location import. */
private void toMapTalukaHealthBlock(Map<String, Object> map, RchTalukaHealthBlockRecord talukaHealthBlockRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.TALUKA_ID, talukaHealthBlockRecord.getTalukaCode());
    map.put(KilkariConstants.HEALTH_BLOCK_ID, talukaHealthBlockRecord.getHealthBlockCode());
    map.put(KilkariConstants.EXEC_DATE, talukaHealthBlockRecord.getExecDate());
}

/** Maps an RCH health-facility (PHC) record for location import. */
private void toMapHealthFacility(Map<String, Object> map, RchHealthFacilityRecord healthFacilityRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, healthFacilityRecord.getDistrictCode());
    map.put(KilkariConstants.TALUKA_ID, healthFacilityRecord.getTalukaCode());
    map.put(KilkariConstants.HEALTH_BLOCK_ID, healthFacilityRecord.getHealthBlockCode());
    map.put(KilkariConstants.HEALTH_FACILITY_ID, healthFacilityRecord.getHealthFacilityCode());
    map.put(KilkariConstants.HEALTH_FACILITY_NAME, healthFacilityRecord.getHealthFacilityName());
    map.put(KilkariConstants.EXEC_DATE, healthFacilityRecord.getExecDate());
}

/** Maps an RCH health-sub-facility record for location import. */
private void toMapHealthSubFacility(Map<String, Object> map, RchHealthSubFacilityRecord healthSubFacilityRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, healthSubFacilityRecord.getDistrictCode());
    map.put(KilkariConstants.TALUKA_ID, healthSubFacilityRecord.getTalukaCode());
    map.put(KilkariConstants.HEALTH_FACILITY_ID, healthSubFacilityRecord.getHealthFacilityCode());
    map.put(KilkariConstants.HEALTH_SUB_FACILITY_ID, healthSubFacilityRecord.getHealthSubFacilityCode());
    map.put(KilkariConstants.HEALTH_SUB_FACILITY_NAME, healthSubFacilityRecord.getHealthSubFacilityName());
    map.put(KilkariConstants.EXEC_DATE, healthSubFacilityRecord.getExecDate());
}

/** Maps an RCH village↔health-sub-facility link record for location import. */
private void toMapVillageHealthSubFacility(Map<String, Object> map, RchVillageHealthSubFacilityRecord villageHealthSubFacilityRecord, Long stateCode) {
    map.put(KilkariConstants.CSV_STATE_ID, stateCode);
    map.put(KilkariConstants.DISTRICT_ID, villageHealthSubFacilityRecord.getDistrictCode());
    map.put(KilkariConstants.CENSUS_VILLAGE_ID, villageHealthSubFacilityRecord.getVillageCode());
    map.put(KilkariConstants.HEALTH_SUB_FACILITY_ID, villageHealthSubFacilityRecord.getHealthSubFacilityCode());
    map.put(KilkariConstants.EXEC_DATE, villageHealthSubFacilityRecord.getExecDate());
}

/**
 * Converts an RCH child record to the generic field map used by the import
 * services; location fields are delegated to toMapLocChild.
 */
private Map<String, Object> toMap(RchChildRecord childRecord) {
    Map<String, Object> map = new HashMap<>();
    toMapLocChild(map, childRecord);
    map.put(KilkariConstants.BENEFICIARY_NAME, childRecord.getName());
    map.put(KilkariConstants.MOBILE_NO, mctsBeneficiaryValueProcessor.getMsisdnByString(childRecord.getMobileNo()));
    map.put(KilkariConstants.DOB, mctsBeneficiaryValueProcessor.getDateByString(childRecord.getBirthdate()));
    map.put(KilkariConstants.MCTS_ID, childRecord.getMctsId());
    // NOTE(review): getMotherInstanceByBeneficiaryId is invoked twice for the same id
    // (null-check + value); a local variable would halve the lookups — confirm the
    // call is side-effect free before refactoring.
    map.put(KilkariConstants.MCTS_MOTHER_ID, mctsBeneficiaryValueProcessor.getMotherInstanceByBeneficiaryId(childRecord.getMctsMotherIdNo()) == null ? null : mctsBeneficiaryValueProcessor.getMotherInstanceByBeneficiaryId(childRecord.getMctsMotherIdNo()).getBeneficiaryId());
    map.put(KilkariConstants.RCH_ID, childRecord.getRegistrationNo());
    map.put(KilkariConstants.RCH_MOTHER_ID, childRecord.getMotherRegistrationNo());
    map.put(KilkariConstants.DEATH, mctsBeneficiaryValueProcessor.getDeathFromString(String.valueOf(childRecord.getEntryType())));
    // Empty exec-date string is treated as "no execution date".
    map.put(KilkariConstants.EXECUTION_DATE, "".equals(childRecord.getExecDate()) ? null : mctsBeneficiaryValueProcessor.getLocalDateByString(childRecord.getExecDate()));
    return map;
}

/** Copies the location hierarchy (state → village) of a child record into the map. */
private void toMapLocChild(Map<String, Object> map, RchChildRecord childRecord) {
    map.put(KilkariConstants.STATE_ID, childRecord.getStateId());
    map.put(KilkariConstants.DISTRICT_ID, childRecord.getDistrictId());
    map.put(KilkariConstants.DISTRICT_NAME, childRecord.getDistrictName());
    map.put(KilkariConstants.TALUKA_ID, childRecord.getTalukaId());
    map.put(KilkariConstants.TALUKA_NAME, childRecord.getTalukaName());
    map.put(KilkariConstants.HEALTH_BLOCK_ID, childRecord.getHealthBlockId());
    map.put(KilkariConstants.HEALTH_BLOCK_NAME, childRecord.getHealthBlockName());
    map.put(KilkariConstants.PHC_ID, childRecord.getPhcId());
    map.put(KilkariConstants.PHC_NAME, childRecord.getPhcName());
    map.put(KilkariConstants.SUB_CENTRE_ID, childRecord.getSubCentreId());
    map.put(KilkariConstants.SUB_CENTRE_NAME, childRecord.getSubCentreName());
    map.put(KilkariConstants.CENSUS_VILLAGE_ID, childRecord.getVillageId());
    map.put(KilkariConstants.VILLAGE_NAME, childRecord.getVillageName());
}

/** Copies the location hierarchy (state → village) of an ANM/ASHA record into the map. */
private void toMapLoc(Map<String, Object> map, RchAnmAshaRecord anmAshaRecord) {
    map.put(KilkariConstants.STATE_ID, anmAshaRecord.getStateId());
    map.put(KilkariConstants.DISTRICT_ID, anmAshaRecord.getDistrictId());
    map.put(KilkariConstants.DISTRICT_NAME, anmAshaRecord.getDistrictName());
    map.put(KilkariConstants.TALUKA_ID, anmAshaRecord.getTalukaId());
    map.put(KilkariConstants.TALUKA_NAME, anmAshaRecord.getTalukaName());
    map.put(KilkariConstants.HEALTH_BLOCK_ID, anmAshaRecord.getHealthBlockId());
    map.put(KilkariConstants.HEALTH_BLOCK_NAME, anmAshaRecord.getHealthBlockName());
    map.put(KilkariConstants.PHC_ID, anmAshaRecord.getPhcId());
    map.put(KilkariConstants.PHC_NAME, anmAshaRecord.getPhcName());
    map.put(KilkariConstants.SUB_CENTRE_ID, anmAshaRecord.getSubCentreId());
    map.put(KilkariConstants.SUB_CENTRE_NAME, anmAshaRecord.getSubCentreName());
    map.put(KilkariConstants.CENSUS_VILLAGE_ID, anmAshaRecord.getVillageId());
    map.put(KilkariConstants.VILLAGE_NAME, anmAshaRecord.getVillageName());
}

/**
 * Builds the HPD (High Priority District) filter map from settings:
 * stateId → set of district ids. Empty map when the HPD_STATES setting is blank.
 */
private Map<Long, Set<Long>> getHpdFilters() {
    Map<Long, Set<Long>> hpdMap = new HashMap<>();
    String locationProp = settingsFacade.getProperty(Constants.HPD_STATES);
    if (StringUtils.isBlank(locationProp)) {
        return hpdMap;
    }
    String[] locationParts = StringUtils.split(locationProp, ',');
    for (String locationPart : locationParts) {
        Long stateId = Long.valueOf(locationPart);
        hpdMap.put(stateId, getHpdForState(stateId));
    }
    return hpdMap;
}

/**
 * Reads the comma-separated HPD district ids configured for one state.
 * Empty set when the per-state setting is blank.
 */
private Set<Long> getHpdForState(Long stateId) {
    Set<Long> districtSet = new HashSet<>();
    String hpdProp = settingsFacade.getProperty(Constants.BASE_HPD_CONFIG + stateId);
    if (StringUtils.isBlank(hpdProp)) {
        return districtSet;
    }
    String[] districtParts = StringUtils.split(hpdProp, ',');
    for (String districtPart : districtParts) {
        districtSet.add(Long.valueOf(districtPart));
    }
    return districtSet;
}

/**
 * Returns true when the user's district passes the HPD filter. States absent
 * from the filter map (or with a null district set) are always allowed.
 */
private boolean validateHpdUser(Map<Long, Set<Long>> hpdFilters, long stateId, long districtId) {
    // if we have the state for hpd filter
    if (hpdFilters.containsKey(stateId)) {
        // if district exists in the hpd filter set
        Set<Long> districtSet = hpdFilters.get(stateId);
        if (districtSet != null) {
            return districtSet.contains(districtId);
        }
    }
    return true;
}
/**
 * Deletes nms_rch_failures rows (for one state/user-type) that have since been
 * imported successfully. No-op when start == end reference date.
 * NOTE(review): @Transactional on a private method is not applied by Spring's
 * proxy-based AOP — confirm whether the transaction is actually in effect here.
 */
@Transactional
private void deleteRchImportFailRecords(final LocalDate startReferenceDate, final LocalDate endReferenceDate, final RchUserType rchUserType, final Long stateId) {
    LOGGER.debug("Deleting nms_rch_failures records which are successfully imported");
    if (startReferenceDate.equals(endReferenceDate)) {
        LOGGER.debug("No failed imports in the past 7days ");
    } else {
        QueryParams queryParams = new QueryParams(new Order("importDate", Order.Direction.ASC));
        List<RchImportFailRecord> failedImports = rchImportFailRecordDataService.getByStateAndImportdateAndUsertype(stateId, startReferenceDate, rchUserType, queryParams);
        int counter = 0;
        for (RchImportFailRecord eachFailedImport : failedImports) {
            rchImportFailRecordDataService.delete(eachFailedImport);
            counter++;
        }
        LOGGER.debug("Deleted {} rows from nms_rch_failures", counter);
    }
}

// Null-safe size helper. NOTE(review): raw Collection type — Collection<?> would
// avoid the raw-type warning without behavior change.
private int sizeNullSafe(Collection collection) {
    return collection == null ? 0 : collection.size();
}

/** Local directory where RCH response files are staged (from settings). */
private File localResponseDir() {
    return new File(this.settingsFacade.getProperty(LOCAL_RESPONSE_DIR));
}

/**
 * Copies a local response file to the configured remote directory via scp.
 * NOTE(review): the shell command is built by string concatenation — file names
 * containing spaces or shell metacharacters will break or be interpreted;
 * consider an argument-list exec. Confirm file names are controlled.
 */
private void scpResponseToRemote(String fileName) {
    String remoteDir = settingsFacade.getProperty(REMOTE_RESPONSE_DIR);
    String command = "scp " + localResponseFile(fileName) + " " + remoteDir;
    ExecutionHelper execHelper = new ExecutionHelper();
    execHelper.exec(command, getScpTimeout());
}

/** Copies a remote response file to the local directory via scp and returns it. */
private File scpResponseToLocal(String fileName) {
    String localDir = settingsFacade.getProperty(LOCAL_RESPONSE_DIR);
    String command = "scp " + remoteResponseFile(fileName) + " " + localDir;
    ExecutionHelper execHelper = new ExecutionHelper();
    execHelper.exec(command, getScpTimeout());
    return new File(localResponseFile(fileName));
}

/** File handle for a location-update response in the remote response directory. */
private File fileForLocUpdate(String fileName) {
    return new File(remoteResponseFile(fileName));
}

/** File handle for an XML location-update response in the remote XML directory. */
private File fileForXmlLocUpdate(String fileName) {
    return new File(remoteResponseFileForXml(fileName));
}

/** Joins the local response dir and a file name, adding "/" only when needed. */
public String localResponseFile(String file) {
    String localFile = settingsFacade.getProperty(LOCAL_RESPONSE_DIR);
    localFile += localFile.endsWith("/") ? "" : "/";
    localFile += file;
    return localFile;
}

/** Joins the remote response dir and a file name, adding "/" only when needed. */
public String remoteResponseFile(String file) {
    String remoteFile = settingsFacade.getProperty(REMOTE_RESPONSE_DIR);
    remoteFile += remoteFile.endsWith("/") ? "" : "/";
    remoteFile += file;
    return remoteFile;
}

/** Joins the remote XML response dir and a file name, adding "/" only when needed. */
public String remoteResponseFileForXml(String file) {
    String remoteFile = settingsFacade.getProperty(REMOTE_RESPONSE_DIR_XML);
    remoteFile += remoteFile.endsWith("/") ? "" : "/";
    remoteFile += file;
    return remoteFile;
}

/** scp timeout from settings; falls back to SCP_TIME_OUT on parse failure. */
private Long getScpTimeout() {
    try {
        return Long.parseLong(settingsFacade.getProperty(SCP_TIMEOUT_SETTING));
    } catch (NumberFormatException e) {
        return SCP_TIME_OUT;
    }
}

/**
 * Reads a saved RCH SOAP response file and deserializes it into the Axis result type.
 * NOTE(review): readers are never closed (no try-with-resources) — resource leak;
 * `xml += currentLine` in a loop is O(n^2) — StringBuilder would be linear; and the
 * thrown RchFileManipulationException drops the original cause.
 */
private DS_DataResponseDS_DataResult readResponses(File file) throws RchFileManipulationException {
    try {
        FileReader reader = new FileReader(file);
        BufferedReader br = new BufferedReader(reader);
        String xml = "";
        String currentLine;
        while ((currentLine = br.readLine()) != null) {
            xml += currentLine;
        }
        return (DS_DataResponseDS_DataResult) deserializeAxisObject(DS_DataResponseDS_DataResult.class, xml);
    } catch (Exception e) {
        throw new RchFileManipulationException("Failed to read response file."); //NOPMD
    }
}

/**
 * Extracts the (second) NewDataSet fragment from a saved XML response and strips
 * diffgram row attributes so it can be unmarshalled.
 * NOTE(review): the guard is `length > 2` but index [3] is used — when split()
 * yields exactly 3 parts this throws ArrayIndexOutOfBoundsException; the guard
 * should presumably be `length > 3`. Confirm against real response files.
 */
private String readResponsesFromXml(File file) throws RchFileManipulationException {
    try {
        String xmlString;
        String string = FileUtils.readFileToString(file);
        String[] newDataSetArr = string.split("NewDataSet");
        if(newDataSetArr.length > 2) {
            xmlString = "<NewDataSet" + newDataSetArr[3] + "NewDataSet>";
            xmlString = xmlString.replaceAll("\\n", " ");
            xmlString = xmlString.replaceAll("<Records diffgr:id=\"Records[0-9]+\" msdata:rowOrder=\"[0-9]+\">", "<Records >");
            return xmlString;
        }
    } catch (Exception e) {
        throw new RchFileManipulationException("Failed to read response file.", e); //NOPMD
    }
    return "";
}

/**
 * Serializes an Axis-generated bean to XML using its static TypeDesc metadata.
 * Returns null for a null input; wraps any failure in IOException.
 */
private String serializeAxisObject(Object obj) throws IOException {
    try {
        if (obj == null) {
            return null;
        }
        StringWriter outStr = new StringWriter();
        TypeDesc typeDesc = getAxisTypeDesc(obj);
        QName qname = typeDesc.getXmlType();
        String lname = qname.getLocalPart();
        // Axis anonymous-type local names are prefixed with ">" — strip it.
        if (lname.startsWith(">") && lname.length() > 1) {
            lname = lname.substring(1);
        }
        qname = new QName(qname.getNamespaceURI(), lname);
        AxisServer server = new AxisServer();
        BeanSerializer ser = new BeanSerializer(obj.getClass(), qname, typeDesc);
        SerializationContext ctx = new SerializationContext(outStr, new MessageContext(server));
        ctx.setSendDecl(false);
        ctx.setDoMultiRefs(false);
        ctx.setPretty(true);
        try {
            ser.serialize(qname, new AttributesImpl(), obj, ctx);
        } catch (final Exception e) {
            throw new Exception("Unable to serialize object " + obj.getClass().getName(), e);
        }
        String xml = outStr.toString();
        return xml; //NOPMD
    } catch (Exception e) {
        throw new IOException("Serialization failed", e);
    }
}

/**
 * Deserializes an XML fragment into an Axis object by wrapping it in a SOAP
 * envelope; retries with an xsi-namespaced envelope on the first failure.
 */
private Object deserializeAxisObject(Class<?> cls, String xml) throws IOException {
    //CHECKSTYLE:OFF
    try {
        final String SOAP_START = "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\"><soapenv:Header /><soapenv:Body>";
        // NOTE(review): the literal below appears truncated in this copy of the file
        // (unterminated string). It presumably should be the same envelope with an
        // added xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" declaration —
        // confirm against version control before touching it.
        final String SOAP_START_XSI = "<soapenv:Envelope xmlns:soapenv=\"http:
        final String SOAP_END = "</soapenv:Body></soapenv:Envelope>";
        //CHECKSTYLE:ON
        Object result = null;
        try {
            Message message = new Message(SOAP_START + xml + SOAP_END);
            result = message.getSOAPEnvelope().getFirstBody()
                    .getObjectValue(cls);
        } catch (Exception e) {
            // First parse failed — retry with the xsi-qualified envelope.
            try {
                Message message = new Message(SOAP_START_XSI + xml + SOAP_END);
                result = message.getSOAPEnvelope().getFirstBody()
                        .getObjectValue(cls);
            } catch (Exception e1) {
                throw new Exception(e1); //NOPMD
            }
        }
        return result;
    } catch (Exception e) {
        throw new IOException("Deserialization failed", e); //NOPMD
    }
}

/**
 * Looks up the Axis TypeDesc of a generated bean via its static getTypeDesc()
 * method (reflection), wrapping any failure with the class name for context.
 */
private TypeDesc getAxisTypeDesc(Object obj) throws Exception { //NOPMD
    final Class<? extends Object> objClass = obj.getClass();
    try {
        final Method methodGetTypeDesc = objClass.getMethod("getTypeDesc", new Class[]{});
        final TypeDesc typeDesc = (TypeDesc) methodGetTypeDesc.invoke(obj, new Object[]{});
        return (typeDesc);
    } catch (final Exception e) {
        throw new Exception("Unable to get Axis TypeDesc for " + objClass.getName(), e); //NOPMD
    }
}

/**
 * Decides whether an ASHA record would be created or updated, based on whether
 * an FLW with the same MCTS id already exists in the record's state.
 */
private String rchFlwActionFinder(RchAnmAshaRecord record) {
    if (frontLineWorkerService.getByMctsFlwIdAndState(record.getGfId().toString(), stateDataService.findByCode(record.getStateId())) == null) {
        LOGGER.info("create");
        return "CREATE";
    } else {
        LOGGER.info("update");
        return "UPDATE";
    }
}

/**
 * Re-runs location updates from previously saved XML response files for one
 * state/user-type, oldest import first.
 */
@Transactional
public void locationUpdateInTable(Long stateId, RchUserType rchUserType) {
    try {
        List<RchImportFacilitator> rchImportFiles = rchImportFacilitatorService.findByStateIdAndRchUserType(stateId, rchUserType);
        Collections.sort(rchImportFiles, new Comparator<RchImportFacilitator>() {
            public int compare(RchImportFacilitator m1, RchImportFacilitator m2) {
                return m1.getImportDate().compareTo(m2.getImportDate()); //ascending order
            }
        });
        for (RchImportFacilitator rchImportFile : rchImportFiles ) {
            File remoteResponseFile = fileForXmlLocUpdate(rchImportFile.getFileName());
            if (remoteResponseFile.exists() && !remoteResponseFile.isDirectory()) {
                LOGGER.debug("Started reading file {}.", rchImportFile.getFileName());
                String result = readResponsesFromXml(remoteResponseFile);
                LOGGER.debug("Completed Reading Responses");
                if (result.contains(RECORDS)) {
                    if (rchUserType == RchUserType.MOTHER) {
                        motherLocUpdate(result, stateId, rchUserType);
                    } else if (rchUserType == RchUserType.CHILD) {
                        childLocUpdate(result, stateId, rchUserType);
                    } else if (rchUserType == RchUserType.ASHA) {
                        ashaLocUpdate(result, stateId, rchUserType);
                    }
                } else {
                    // NOTE(review): message says "mother" regardless of rchUserType —
                    // looks like a copy/paste; confirm before relying on this log.
                    String warning = String.format("No mother data set received from RCH for %d stateId", stateId);
                    LOGGER.warn(warning);
                }
            } else {
                continue;
            }
        }
    } catch (ExecutionException e) {
        // NOTE(review): exception concatenated into the message — the stack trace is
        // lost; SLF4J style would pass `e` as the last argument.
        LOGGER.error("Failed to copy file from remote server to local directory." + e);
    } catch (RchFileManipulationException e) {
        // NOTE(review): "saved d" looks like a typo in the log message.
        LOGGER.error("No files saved d : {}", e);
    }
}

/**
 * Re-runs location updates from uploaded CSV files for one state/user-type,
 * sorted by the date embedded in each file name (oldest first).
 */
@Transactional
public void locationUpdateInTableFromCsv(Long stateId, RchUserType rchUserType) throws IOException {
    List<MultipartFile> rchImportFiles = findByStateIdAndRchUserType(stateId, rchUserType);
    Collections.sort(rchImportFiles, new Comparator<MultipartFile>() {
        public int compare(MultipartFile m1, MultipartFile m2) {
            Date file1Date;
            Date file2Date;
            int flag = 1;
            try {
                file1Date = getDateFromFileName(m1.getOriginalFilename());
                file2Date = getDateFromFileName(m2.getOriginalFilename());
                flag = file1Date.compareTo(file2Date);
            } catch (ParseException e) {
                // NOTE(review): printStackTrace in a comparator; an unparsable name
                // silently sorts as "greater" (flag=1) — confirm that is acceptable.
                e.printStackTrace();
            }
            return flag; //ascending order
        }
    });
    for (MultipartFile rchImportFile : rchImportFiles) {
        try (InputStream in = rchImportFile.getInputStream()) {
            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
            Map<String, CellProcessor> cellProcessorMapper;
            List<Map<String, Object>> recordList;
            LOGGER.debug("Started reading file {}.", rchImportFile.getOriginalFilename());
            if (rchUserType == RchUserType.MOTHER) {
                cellProcessorMapper = mctsBeneficiaryImportService.getRchMotherProcessorMapping();
                recordList = mctsBeneficiaryImportReaderService.readCsv(bufferedReader, cellProcessorMapper);
                motherLocUpdateFromCsv(recordList, stateId, rchUserType);
            } else if (rchUserType == RchUserType.CHILD) {
                cellProcessorMapper = mctsBeneficiaryImportReaderService.getRchChildProcessorMapping();
                recordList = mctsBeneficiaryImportReaderService.readCsv(bufferedReader, cellProcessorMapper);
                childLocUpdateFromCsv(recordList, stateId, rchUserType);
            } else if (rchUserType == RchUserType.ASHA) {
                cellProcessorMapper = mctsBeneficiaryImportService.getRchAshaProcessorMapping();
                recordList = mctsBeneficiaryImportReaderService.readCsv(bufferedReader, cellProcessorMapper);
                ashaLocUpdateFromCsv(recordList, stateId, rchUserType);
            }
        }
    }
}

// Method signature continues on the next wrap line (getLocationFilesDirectory).
@Override
public String
getLocationFilesDirectory() {
    return settingsFacade.getProperty(REMOTE_RESPONSE_DIR_LOCATION);
}

/**
 * Unmarshals an XML mother data set and updates location fields for mothers
 * that already exist in the database.
 */
private void motherLocUpdate(String result, Long stateId, RchUserType rchUserType) { // NO CHECKSTYLE Cyclomatic Complexity
    try {
        ArrayList<Map<String, Object>> locArrList = new ArrayList<>();
        RchMothersDataSet mothersDataSet = (result == null) ? null : (RchMothersDataSet) MarshallUtils.unmarshall(result, RchMothersDataSet.class);
        LOGGER.debug("Unmarshall Completed");
        if (mothersDataSet == null || mothersDataSet.getRecords() == null) {
            String warning = String.format("No mother data set received from RCH for %d stateId", stateId);
            LOGGER.warn(warning);
        } else {
            List<RchMotherRecord> motherRecords = mothersDataSet.getRecords();
            LOGGER.debug("Records read {}", motherRecords.size());
            // Only update locations for mothers already present in the database.
            List<String> existingMotherIds = getDatabaseMothers(motherRecords);
            for (RchMotherRecord record : motherRecords) {
                if(existingMotherIds.contains(record.getRegistrationNo())) {
                    Map<String, Object> locMap = new HashMap<>();
                    toMapLocMother(locMap, record);
                    locMap.put(KilkariConstants.RCH_ID, record.getRegistrationNo());
                    locArrList.add(locMap);
                }
            }
        }
        if (!locArrList.isEmpty()) {
            updateLocInMap(locArrList, stateId, rchUserType);
        }
    } catch (JAXBException e) {
        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH mother data from %d stateId.", stateId), e);
    } catch (RchInvalidResponseStructureException e) {
        String error = String.format("Cannot read RCH mothers data from stateId: %d. Response Deserialization Error", stateId);
        LOGGER.error(error, e);
    } catch (IOException e) {
        LOGGER.error("Input output exception.");
    } catch (InvalidLocationException e) {
        LOGGER.error("Invalid location");
    }
}

/**
 * CSV variant of motherLocUpdate: converts CSV row maps to mother records and
 * updates location fields for those already in the database.
 */
private void motherLocUpdateFromCsv(List<Map<String, Object>> result, Long stateId, RchUserType rchUserType) {
    try {
        ArrayList<Map<String, Object>> locArrList = new ArrayList<>();
        List<RchMotherRecord> rchMotherRecords = new ArrayList<>();
        for (Map<String, Object> record : result) {
            RchMotherRecord rchMotherRecord = convertMapToRchMother(record);
            rchMotherRecords.add(rchMotherRecord);
        }
        List<String> existingMotherIds = getDatabaseMothers(rchMotherRecords);
        for(RchMotherRecord rchMotherRecord : rchMotherRecords) {
            if (existingMotherIds.contains(rchMotherRecord.getRegistrationNo())) {
                Map<String, Object> locMap = new HashMap<>();
                toMapLocMother(locMap, rchMotherRecord);
                locMap.put(KilkariConstants.RCH_ID, rchMotherRecord.getRegistrationNo());
                locArrList.add(locMap);
            }
        }
        if (!locArrList.isEmpty()) {
            updateLocInMap(locArrList, stateId, rchUserType);
        }
    } catch (IOException e) {
        LOGGER.error("IO exception.");
    } catch (InvalidLocationException e) {
        LOGGER.error("Location Invalid");
    }
}

/**
 * Unmarshals an XML child data set and updates location fields for children
 * that already exist in the database.
 */
private void childLocUpdate(String result, Long stateId, RchUserType rchUserType) { // NO CHECKSTYLE Cyclomatic Complexity
    try {
        ArrayList<Map<String, Object>> locArrList = new ArrayList<>();
        RchChildrenDataSet childrenDataSet = (result == null) ? null : (RchChildrenDataSet) MarshallUtils.unmarshall(result, RchChildrenDataSet.class);
        if (childrenDataSet == null || childrenDataSet.getRecords() == null) {
            String warning = String.format("No child data set received from RCH for %d stateId", stateId);
            LOGGER.warn(warning);
        } else {
            List<RchChildRecord> childRecords = childrenDataSet.getRecords();
            LOGGER.debug("Records read {}", childRecords.size());
            // Only update locations for children already present in the database.
            List<String> existingChildIds = getDatabaseChild(childRecords);
            for (RchChildRecord record : childRecords) {
                if(existingChildIds.contains(record.getRegistrationNo())) {
                    Map<String, Object> locMap = new HashMap<>();
                    toMapLocChild(locMap, record);
                    locMap.put(KilkariConstants.RCH_ID, record.getRegistrationNo());
                    locArrList.add(locMap);
                }
            }
        }
        if (!locArrList.isEmpty()) {
            updateLocInMap(locArrList, stateId, rchUserType);
        }
    } catch (JAXBException e) {
        throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH children data from %d stateId.", stateId), e);
    } catch (RchInvalidResponseStructureException e) {
        String error = String.format("Cannot read RCH children data from stateId:%d. Response Deserialization Error", stateId);
        LOGGER.error(error, e);
    } catch (IOException e) {
        LOGGER.error("Input output exception.");
    } catch (InvalidLocationException e) {
        LOGGER.error("Invalid location");
    }
}

/**
 * CSV variant of childLocUpdate: converts CSV row maps to child records and
 * updates location fields for those already in the database.
 */
private void childLocUpdateFromCsv(List<Map<String, Object>> result, Long stateId, RchUserType rchUserType) {
    try {
        ArrayList<Map<String, Object>> locArrList = new ArrayList<>();
        List<RchChildRecord> rchChildRecords = new ArrayList<>();
        for (Map<String, Object> record : result) {
            RchChildRecord rchChildRecord = convertMapToRchChild(record);
            rchChildRecords.add(rchChildRecord);
        }
        // NOTE(review): variable is named existingMotherIds but holds CHILD ids
        // (copy/paste from the mother variant) — rename candidate.
        List<String> existingMotherIds = getDatabaseChild(rchChildRecords);
        for(RchChildRecord rchChildRecord : rchChildRecords) {
            if (existingMotherIds.contains(rchChildRecord.getRegistrationNo())) {
                Map<String, Object> locMap = new HashMap<>();
                toMapLocChild(locMap, rchChildRecord);
                locMap.put(KilkariConstants.RCH_ID, rchChildRecord.getRegistrationNo());
                locArrList.add(locMap);
            }
        }
        if (!locArrList.isEmpty()) {
            updateLocInMap(locArrList, stateId, rchUserType);
        }
    } catch (IOException e) {
        LOGGER.error("IO exception.");
    } catch (InvalidLocationException e) {
        LOGGER.error("Location Invalid");
    }
}

/**
 * Unmarshals an XML ANM/ASHA data set and updates location fields for FLWs
 * that already exist in the database.
 * (Method body continues past this wrap boundary.)
 */
private void ashaLocUpdate(String result, Long stateId, RchUserType rchUserType) { // NO CHECKSTYLE Cyclomatic Complexity
    try {
        ArrayList<Map<String, Object>> locArrList = new ArrayList<>();
        RchAnmAshaDataSet ashaDataSet = (result == null) ?
null : (RchAnmAshaDataSet) MarshallUtils.unmarshall(result, RchAnmAshaDataSet.class);
if (ashaDataSet == null || ashaDataSet.getRecords() == null) {
    String warning = String.format("No FLW data set received from RCH for %d stateId", stateId);
    LOGGER.warn(warning);
} else {
    List<RchAnmAshaRecord> anmAshaRecords = ashaDataSet.getRecords();
    LOGGER.debug("Records read {}", anmAshaRecords.size());
    State state = stateDataService.findByCode(stateId);
    // Build mctsFlwId → database id lookup for FLWs already in this state.
    List<FrontLineWorker> existingAshas = getDatabaseAsha(anmAshaRecords,state.getId());
    Map<String, Long> existingAshaIds = new HashMap<>();
    List<String> mctsIds = new ArrayList<>();
    for (FrontLineWorker asha : existingAshas) {
        existingAshaIds.put(asha.getMctsFlwId(), asha.getId());
        mctsIds.add(asha.getMctsFlwId());
    }
    for (RchAnmAshaRecord record : anmAshaRecords ) {
        if(mctsIds.contains(record.getGfId().toString())) {
            Map<String, Object> locMap = new HashMap<>();
            toMapLoc(locMap, record);
            locMap.put(FlwConstants.ID, existingAshaIds.get(record.getGfId().toString()));
            locMap.put(FlwConstants.GF_ID, record.getGfId());
            locArrList.add(locMap);
        }
    }
}
if (!locArrList.isEmpty()) {
    updateLocInMap(locArrList, stateId, rchUserType);
}
} catch (JAXBException e) {
    throw new RchInvalidResponseStructureException(String.format("Cannot deserialize RCH FLW data from %d stateId.", stateId), e);
} catch (RchInvalidResponseStructureException e) {
    String error = String.format("Cannot read RCH FLW data from stateId:%d. Response Deserialization Error", stateId);
    LOGGER.error(error, e);
} catch (IOException e) {
    LOGGER.error("Input output exception.");
} catch (InvalidLocationException e) {
    LOGGER.error("Invalid location");
}
}

/**
 * CSV variant of ashaLocUpdate: converts CSV row maps to ANM/ASHA records and
 * updates location fields for FLWs already present in the database.
 */
private void ashaLocUpdateFromCsv(List<Map<String, Object>> result, Long stateId, RchUserType rchUserType) {
    try {
        ArrayList<Map<String, Object>> locArrList = new ArrayList<>();
        List<RchAnmAshaRecord> rchAshaRecords = new ArrayList<>();
        for (Map<String, Object> record : result) {
            RchAnmAshaRecord rchAnmAshaRecord = frontLineWorkerImportService.convertMapToRchAsha(record);
            rchAshaRecords.add(rchAnmAshaRecord);
        }
        State state = stateDataService.findByCode(stateId);
        List<FrontLineWorker> existingAshas = getDatabaseAsha(rchAshaRecords,state.getId());
        Map<String, Long> existingAshaIds = new HashMap<>();
        List<String> mctsIds = new ArrayList<>();
        for (FrontLineWorker asha : existingAshas) {
            existingAshaIds.put(asha.getMctsFlwId(), asha.getId());
            mctsIds.add(asha.getMctsFlwId());
        }
        for(RchAnmAshaRecord rchAnmAshaRecord : rchAshaRecords) {
            if (mctsIds.contains(rchAnmAshaRecord.getGfId().toString())) {
                Map<String, Object> locMap = new HashMap<>();
                toMapLoc(locMap, rchAnmAshaRecord);
                locMap.put(FlwConstants.ID, existingAshaIds.get(rchAnmAshaRecord.getGfId().toString()));
                locMap.put(FlwConstants.GF_ID, rchAnmAshaRecord.getGfId());
                locArrList.add(locMap);
            }
        }
        if (!locArrList.isEmpty()) {
            updateLocInMap(locArrList, stateId, rchUserType);
        }
    } catch (IOException e) {
        LOGGER.error("IO exception.");
    } catch (InvalidLocationException e) {
        LOGGER.error("Location Invalid");
    }
}

/**
 * Resolves a record's raw location codes against the pre-loaded LocationFinder
 * caches, returning a map of database ids/names keyed by the Kilkari constants.
 * (Method body continues past this chunk.)
 *
 * NOTE(review): STATE_ID is dereferenced (toString) BEFORE isValidID is checked —
 * a record with a null STATE_ID would throw NPE here rather than be skipped.
 * Confirm upstream guarantees STATE_ID is non-null.
 */
public Map<String, Object> setLocationFields(LocationFinder locationFinder, Map<String, Object> record) throws InvalidLocationException { //NO CHECKSTYLE Cyclomatic Complexity
    Map<String, Object> updatedLoc = new HashMap<>();
    String mapKey = record.get(KilkariConstants.STATE_ID).toString();
    if (isValidID(record, KilkariConstants.STATE_ID) && (locationFinder.getStateHashMap().get(mapKey) != null)) {
updatedLoc.put(KilkariConstants.STATE_ID, locationFinder.getStateHashMap().get(mapKey).getId()); String districtCode = record.get(KilkariConstants.DISTRICT_ID).toString(); mapKey += "_"; mapKey += districtCode; if (isValidID(record, KilkariConstants.DISTRICT_ID) && (locationFinder.getDistrictHashMap().get(mapKey) != null)) { updatedLoc.put(KilkariConstants.DISTRICT_ID, locationFinder.getDistrictHashMap().get(mapKey).getId()); updatedLoc.put(KilkariConstants.DISTRICT_NAME, locationFinder.getDistrictHashMap().get(mapKey).getName()); Long talukaCode = Long.parseLong(record.get(KilkariConstants.TALUKA_ID) == null ? "0" : record.get(KilkariConstants.TALUKA_ID).toString().trim()); mapKey += "_"; mapKey += talukaCode; Taluka taluka = locationFinder.getTalukaHashMap().get(mapKey); updatedLoc.put(KilkariConstants.TALUKA_ID, taluka == null ? null : taluka.getId()); updatedLoc.put(KilkariConstants.TALUKA_NAME, taluka == null ? null : taluka.getName()); String villageSvid = record.get(KilkariConstants.NON_CENSUS_VILLAGE_ID) == null ? "0" : record.get(KilkariConstants.NON_CENSUS_VILLAGE_ID).toString(); String villageCode = record.get(KilkariConstants.CENSUS_VILLAGE_ID) == null ? "0" : record.get(KilkariConstants.CENSUS_VILLAGE_ID).toString(); String healthBlockCode = record.get(KilkariConstants.HEALTH_BLOCK_ID) == null ? "0" : record.get(KilkariConstants.HEALTH_BLOCK_ID).toString(); String healthFacilityCode = record.get(KilkariConstants.PHC_ID) == null ? "0" : record.get(KilkariConstants.PHC_ID).toString(); String healthSubFacilityCode = record.get(KilkariConstants.SUB_CENTRE_ID) == null ? "0" : record.get(KilkariConstants.SUB_CENTRE_ID).toString(); Village village = locationFinder.getVillageHashMap().get(mapKey + "_" + Long.parseLong(villageCode) + "_" + Long.parseLong(villageSvid)); updatedLoc.put(KilkariConstants.CENSUS_VILLAGE_ID, village == null ? null : village.getId()); updatedLoc.put(KilkariConstants.VILLAGE_NAME, village == null ? 
null : village.getName()); mapKey = record.get(KilkariConstants.STATE_ID).toString() + "_" + districtCode; mapKey += "_"; mapKey += Long.parseLong(healthBlockCode); HealthBlock healthBlock = locationFinder.getHealthBlockHashMap().get(mapKey); updatedLoc.put(KilkariConstants.HEALTH_BLOCK_ID, healthBlock == null ? null : healthBlock.getId()); updatedLoc.put(KilkariConstants.HEALTH_BLOCK_NAME, healthBlock == null ? null : healthBlock.getName()); mapKey += "_"; mapKey += Long.parseLong(healthFacilityCode); HealthFacility healthFacility = locationFinder.getHealthFacilityHashMap().get(mapKey); updatedLoc.put(KilkariConstants.PHC_ID, healthFacility == null ? null : healthFacility.getId()); updatedLoc.put(KilkariConstants.PHC_NAME, healthFacility == null ? null : healthFacility.getName()); mapKey += "_"; mapKey += Long.parseLong(healthSubFacilityCode); HealthSubFacility healthSubFacility = locationFinder.getHealthSubFacilityHashMap().get(mapKey); updatedLoc.put(KilkariConstants.SUB_CENTRE_ID, healthSubFacility == null ? null : healthSubFacility.getId()); updatedLoc.put(KilkariConstants.SUB_CENTRE_NAME, healthSubFacility == null ? 
null : healthSubFacility.getName()); return updatedLoc; } else { throw new InvalidLocationException(String.format(KilkariConstants.INVALID_LOCATION, KilkariConstants.DISTRICT_ID, record.get(KilkariConstants.DISTRICT_ID))); } } else { throw new InvalidLocationException(String.format(KilkariConstants.INVALID_LOCATION, KilkariConstants.STATE_ID, record.get(KilkariConstants.STATE_ID))); } } private boolean isValidID(final Map<String, Object> map, final String key) { Object obj = map.get(key); if (obj == null || obj.toString().isEmpty() || "NULL".equalsIgnoreCase(obj.toString())) { return false; } if (obj.getClass().equals(Long.class)) { return (Long) obj > 0L; } return !"0".equals(obj); } private List<MultipartFile> findByStateIdAndRchUserType(Long stateId, RchUserType rchUserType) throws IOException { ArrayList <MultipartFile> csvFilesByStateIdAndRchUserType = new ArrayList<>(); String locUpdateDir = settingsFacade.getProperty(REMOTE_RESPONSE_DIR_CSV); File file = new File(locUpdateDir); File[] files = file.listFiles(); if (files != null) { for(File f: files){ String[] fileNameSplitter = f.getName().split("_"); if(Objects.equals(fileNameSplitter[2], stateId.toString()) && fileNameSplitter[3].equalsIgnoreCase(rchUserType.toString())){ try { FileInputStream input = new FileInputStream(f); MultipartFile multipartFile = new MockMultipartFile("file", f.getName(), "text/plain", IOUtils.toByteArray(input)); csvFilesByStateIdAndRchUserType.add(multipartFile); }catch(IOException e) { LOGGER.debug("IO Exception", e); } } } } return csvFilesByStateIdAndRchUserType; } private void updateLocInMap(List<Map<String, Object>> locArrList, Long stateId, RchUserType rchUserType) throws InvalidLocationException, IOException { ArrayList<Map<String, Object>> updatedLocArrList = new ArrayList<>(); LocationFinder locationFinder = locationService.updateLocations(locArrList); for (Map<String, Object> record : locArrList ) { Map<String, Object> updatedMap = setLocationFields(locationFinder, 
record); if("asha".equalsIgnoreCase(rchUserType.toString())){ updatedMap.put(FlwConstants.GF_ID, record.get(FlwConstants.GF_ID)); updatedMap.put(FlwConstants.ID, record.get(FlwConstants.ID)); }else { updatedMap.put(KilkariConstants.RCH_ID, record.get(KilkariConstants.RCH_ID)); } updatedLocArrList.add(updatedMap); } if ("asha".equalsIgnoreCase(rchUserType.toString())) { csvWriterAsha(updatedLocArrList, stateId, rchUserType); }else { csvWriterKilkari(updatedLocArrList, stateId, rchUserType); } } @Override public String getBeneficiaryLocationUpdateDirectory() { return settingsFacade.getProperty(LOC_UPDATE_DIR_RCH); } private File csvWriter(Long stateId, RchUserType rchUserType) throws IOException { String locUpdateDir = settingsFacade.getProperty(LOC_UPDATE_DIR_RCH); String fileName = locUpdateDir + "location_update_state" + "_" + stateId + "_" + rchUserType + "_" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date()) + ".csv"; File csvFile = new File(fileName); if (!csvFile.exists()){ csvFile.createNewFile(); } else { LOGGER.debug("File already exists"); } return csvFile; } private void csvWriterKilkari(List<Map<String, Object>> locArrList, Long stateId, RchUserType rchUserType) throws IOException { //NO CHECKSTYLE Cyclomatic Complexity //NOPMD NcssMethodCount if (!locArrList.isEmpty()) { File csvFile = csvWriter(stateId, rchUserType); FileWriter writer; writer = new FileWriter(csvFile, true); writer.write(KilkariConstants.RCH_ID); writer.write(TAB); writer.write(KilkariConstants.STATE_ID); writer.write(TAB); writer.write(KilkariConstants.DISTRICT_ID); writer.write(TAB); writer.write(KilkariConstants.DISTRICT_NAME); writer.write(TAB); writer.write(KilkariConstants.TALUKA_ID); writer.write(TAB); writer.write(KilkariConstants.TALUKA_NAME); writer.write(TAB); writer.write(KilkariConstants.HEALTH_BLOCK_ID); writer.write(TAB); writer.write(KilkariConstants.HEALTH_BLOCK_NAME); writer.write(TAB); writer.write(KilkariConstants.PHC_ID); writer.write(TAB); 
writer.write(KilkariConstants.PHC_NAME); writer.write(TAB); writer.write(KilkariConstants.SUB_CENTRE_ID); writer.write(TAB); writer.write(KilkariConstants.SUB_CENTRE_NAME); writer.write(TAB); writer.write(KilkariConstants.CENSUS_VILLAGE_ID); writer.write(TAB); writer.write(KilkariConstants.VILLAGE_NAME); writer.write(NEXT_LINE); for (Map<String, Object> map : locArrList ) { writer.write(map.get(KilkariConstants.RCH_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.STATE_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.DISTRICT_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.DISTRICT_NAME).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.TALUKA_ID) == null ? "" : map.get(KilkariConstants.TALUKA_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.TALUKA_NAME) == null ? "" : map.get(KilkariConstants.TALUKA_NAME).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.HEALTH_BLOCK_ID) == null ? "" : map.get(KilkariConstants.HEALTH_BLOCK_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.HEALTH_BLOCK_NAME) == null ? "" : map.get(KilkariConstants.HEALTH_BLOCK_NAME).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.PHC_ID) == null ? "" : map.get(KilkariConstants.PHC_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.PHC_NAME) == null ? "" : map.get(KilkariConstants.PHC_NAME).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.SUB_CENTRE_ID) == null ? "" : map.get(KilkariConstants.SUB_CENTRE_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.SUB_CENTRE_NAME) == null ? "" : map.get(KilkariConstants.SUB_CENTRE_NAME).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.CENSUS_VILLAGE_ID) == null ? 
"" : map.get(KilkariConstants.CENSUS_VILLAGE_ID).toString()); writer.write(TAB); writer.write(map.get(KilkariConstants.VILLAGE_NAME) == null ? "" : map.get(KilkariConstants.VILLAGE_NAME).toString()); writer.write(NEXT_LINE); } writer.close(); } } private void csvWriterAsha(List<Map<String, Object>> locArrList, Long stateId, RchUserType rchUserType) throws IOException { //NO CHECKSTYLE Cyclomatic Complexity //NOPMD NcssMethodCount if (!locArrList.isEmpty()) { File csvFile = csvWriter(stateId, rchUserType); FileWriter writer; writer = new FileWriter(csvFile, true); writer.write(FlwConstants.ID); writer.write(TAB); writer.write(FlwConstants.GF_ID); writer.write(TAB); writer.write(FlwConstants.STATE_ID); writer.write(TAB); writer.write(FlwConstants.DISTRICT_ID); writer.write(TAB); writer.write(FlwConstants.DISTRICT_NAME); writer.write(TAB); writer.write(FlwConstants.TALUKA_ID); writer.write(TAB); writer.write(FlwConstants.TALUKA_NAME); writer.write(TAB); writer.write(FlwConstants.HEALTH_BLOCK_ID); writer.write(TAB); writer.write(FlwConstants.HEALTH_BLOCK_NAME); writer.write(TAB); writer.write(FlwConstants.PHC_ID); writer.write(TAB); writer.write(FlwConstants.PHC_NAME); writer.write(TAB); writer.write(FlwConstants.SUB_CENTRE_ID); writer.write(TAB); writer.write(FlwConstants.SUB_CENTRE_NAME); writer.write(TAB); writer.write(FlwConstants.CENSUS_VILLAGE_ID); writer.write(TAB); writer.write(FlwConstants.VILLAGE_NAME); writer.write(NEXT_LINE); for (Map<String, Object> map : locArrList ) { writer.write(map.get(FlwConstants.ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.GF_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.STATE_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.DISTRICT_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.DISTRICT_NAME).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.TALUKA_ID) == null ? 
"" : map.get(FlwConstants.TALUKA_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.TALUKA_NAME) == null ? "" : map.get(FlwConstants.TALUKA_NAME).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.HEALTH_BLOCK_ID) == null ? "" : map.get(FlwConstants.HEALTH_BLOCK_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.HEALTH_BLOCK_NAME) == null ? "" : map.get(FlwConstants.HEALTH_BLOCK_NAME).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.PHC_ID) == null ? "" : map.get(FlwConstants.PHC_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.PHC_NAME) == null ? "" : map.get(FlwConstants.PHC_NAME).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.SUB_CENTRE_ID) == null ? "" : map.get(FlwConstants.SUB_CENTRE_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.SUB_CENTRE_NAME) == null ? "" : map.get(FlwConstants.SUB_CENTRE_NAME).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.CENSUS_VILLAGE_ID) == null ? "" : map.get(FlwConstants.CENSUS_VILLAGE_ID).toString()); writer.write(TAB); writer.write(map.get(FlwConstants.VILLAGE_NAME) == null ? 
"" : map.get(FlwConstants.VILLAGE_NAME).toString()); writer.write(NEXT_LINE); } writer.close(); } } private List<String> getDatabaseMothers(final List<RchMotherRecord> motherRecords) { Timer queryTimer = new Timer(); @SuppressWarnings("unchecked") SqlQueryExecution<List<String>> queryExecution = new SqlQueryExecution<List<String>>() { @Override public String getSqlQuery() { String query = "SELECT rchId FROM nms_mcts_mothers WHERE rchId IN " + queryIdList(motherRecords); LOGGER.debug(SQL_QUERY_LOG, query); return query; } @Override public List<String> execute(Query query) { ForwardQueryResult fqr = (ForwardQueryResult) query.execute(); List<String> result = new ArrayList<>(); for (String existingMotherId : (List<String>) fqr) { result.add(existingMotherId); } return result; } }; List<String> result = (List<String>) rchImportFacilitatorDataService.executeSQLQuery(queryExecution); LOGGER.debug("Database mothers query time {}", queryTimer.time()); return result; } private String queryIdList(List<RchMotherRecord> motherRecords) { StringBuilder stringBuilder = new StringBuilder(); int i = 0; stringBuilder.append("("); for (RchMotherRecord motherRecord: motherRecords) { if (i != 0) { stringBuilder.append(", "); } stringBuilder.append(QUOTATION + motherRecord.getRegistrationNo() + QUOTATION); i++; } stringBuilder.append(")"); return stringBuilder.toString(); } private List<String> getDatabaseChild(final List<RchChildRecord> childRecords) { Timer queryTimer = new Timer(); @SuppressWarnings("unchecked") SqlQueryExecution<List<String>> queryExecution = new SqlQueryExecution<List<String>>() { @Override public String getSqlQuery() { String query = "SELECT rchId FROM nms_mcts_children WHERE rchId IN " + queryIdListChildren(childRecords); LOGGER.debug(SQL_QUERY_LOG, query); return query; } @Override public List<String> execute(Query query) { ForwardQueryResult fqr = (ForwardQueryResult) query.execute(); List<String> result = new ArrayList<>(); for (String existingChildId : 
(List<String>) fqr) { result.add(existingChildId); } return result; } }; List<String> result = (List<String>) rchImportFacilitatorDataService.executeSQLQuery(queryExecution); LOGGER.debug("Database child query time {}", queryTimer.time()); return result; } private String queryIdListChildren(List<RchChildRecord> childRecords) { StringBuilder stringBuilder = new StringBuilder(); int i = 0; stringBuilder.append("("); for (RchChildRecord childRecord: childRecords) { if (i != 0) { stringBuilder.append(", "); } stringBuilder.append(QUOTATION + childRecord.getRegistrationNo() + QUOTATION); i++; } stringBuilder.append(")"); return stringBuilder.toString(); } private List<FrontLineWorker> getDatabaseAsha(final List<RchAnmAshaRecord> ashaRecords, final long stateID) { Timer queryTimer = new Timer(); @SuppressWarnings("unchecked") SqlQueryExecution<List<FrontLineWorker>> queryExecution = new SqlQueryExecution<List<FrontLineWorker>>() { @Override public String getSqlQuery() { String query = "SELECT * FROM nms_front_line_workers WHERE state_id_OID = " + stateID + " and mctsFlwId IN (SELECT mctsFlwId from nms_front_line_workers WHERE state_id_OID = " + stateID + " group by mctsFlwId having count(*) = 1) " + " and mctsFlwId IN " + queryIdListAsha(ashaRecords); LOGGER.debug(SQL_QUERY_LOG, query); return query; } @Override public List<FrontLineWorker> execute(Query query) { query.setClass(FrontLineWorker.class); ForwardQueryResult fqr = (ForwardQueryResult) query.execute(); return (List<FrontLineWorker>) fqr; } }; List<FrontLineWorker> result = rchImportFacilitatorDataService.executeSQLQuery(queryExecution); LOGGER.debug("Database asha's query time {}", queryTimer.time()); return result; } private String queryIdListAsha(List<RchAnmAshaRecord> ashaRecords) { StringBuilder stringBuilder = new StringBuilder(); int i = 0; stringBuilder.append("("); for (RchAnmAshaRecord ashaRecord: ashaRecords) { if (i != 0) { stringBuilder.append(", "); } stringBuilder.append(QUOTATION + 
ashaRecord.getGfId() + QUOTATION); i++; } stringBuilder.append(")"); return stringBuilder.toString(); } private Date getDateFromFileName(String fileName) throws ParseException { String[] names = fileName.split("_"); String dateString = names[5].split(".csv")[0]; Date date = new SimpleDateFormat(DATE_FORMAT).parse(dateString); return date; } private List<Long> getStateIds() { String locationProp = settingsFacade.getProperty(Constants.RCH_LOCATIONS); if (StringUtils.isBlank(locationProp)) { return Collections.emptyList(); } String[] locationParts = StringUtils.split(locationProp, ','); List<Long> stateIds = new ArrayList<>(); for (String locationPart : locationParts) { stateIds.add(Long.valueOf(locationPart)); } return stateIds; } }
package cz.cuni.lf1.lge.ThunderSTORM.datagen; import cz.cuni.lf1.lge.ThunderSTORM.filters.BoxFilter; import cz.cuni.lf1.lge.ThunderSTORM.filters.IFilter; import cz.cuni.lf1.lge.ThunderSTORM.util.ImageProcessor; import static cz.cuni.lf1.lge.ThunderSTORM.util.Math.ceil; import cz.cuni.lf1.lge.ThunderSTORM.util.Range; import ij.process.FloatProcessor; import org.apache.commons.math3.random.RandomDataGenerator; import static cz.cuni.lf1.lge.ThunderSTORM.util.Math.sqr; import static cz.cuni.lf1.lge.ThunderSTORM.util.Math.sqrt; import static ij.process.ImageProcessor.BILINEAR; import ij.process.ShortProcessor; import java.util.Vector; public class DataGenerator { private RandomDataGenerator rand; private Vector<IntegratedGaussian> deleteLater; public DataGenerator() { rand = new RandomDataGenerator(); rand.reSeed(); deleteLater = new Vector<IntegratedGaussian>(); } public FloatProcessor generatePoissonNoise(int width, int height, double variance) { FloatProcessor img = new FloatProcessor(width, height); for(int x = 0; x < width; x++) for(int y = 0; y < height; y++) img.setf(x, y, (float)rand.nextPoisson(variance)); return img; } public FloatProcessor generateGaussianNoise(int width, int height, double mean, double variance) { double sigma = sqrt(variance); FloatProcessor img = new FloatProcessor(width, height); for(int x = 0; x < width; x++) for(int y = 0; y < height; y++) img.setf(x, y, (float)rand.nextGaussian(mean, sigma)); return img; } public FloatProcessor generateBackground(int width, int height, Drift drift, Range bkg) { // padd the background image; crop the center of the image later, after the drift is applied FloatProcessor img = new FloatProcessor(width + 2*(int)ceil(drift.dist), height + 2*(int)ceil(drift.dist)); for(int x = 0, w = img.getWidth(); x < w; x++) for(int y = 0, h = img.getHeight(); y < h; y++) img.setf(x, y, (float)rand.nextUniform(bkg.from, bkg.to, true)); IFilter filter = new BoxFilter(1+2*(int)(((double)Math.min(width, width))/8.0)); return 
filter.filterImage(img); } public Vector<IntegratedGaussian> generateMolecules(int width, int height, FloatProcessor mask, double pixelsize, double density, Range energy, Range fwhm) { Vector<IntegratedGaussian> molist = new Vector<IntegratedGaussian>(); double gPpx = sqr(pixelsize) * density, p_px, p, dx, dy; for(int x = 0; x < width; x++) { for(int y = 0; y < height; y++) { p_px = gPpx * mask.getf(x, y); // probability that a molecule appears inside the pixel p = rand.nextUniform(0.0, 1.0); while(p <= p_px) { dx = rand.nextUniform(-0.5, +0.5); dy = rand.nextUniform(-0.5, +0.5); molist.add(new IntegratedGaussian(rand, x+0.5+dx, y+0.5+dy, energy, fwhm)); p_px -= 1.0; } } } return molist; } public ShortProcessor renderFrame(int width, int height, int frame_no, Drift drift, Vector<IntegratedGaussian> molecules, FloatProcessor bkg, FloatProcessor add_noise, FloatProcessor mul_noise) { // 1. acquisition (with drift) double dx = drift.getDriftX(frame_no), dy = drift.getDriftY(frame_no); FloatProcessor frame = (FloatProcessor)bkg.duplicate(); frame.setInterpolationMethod(BILINEAR); frame.translate(dx, dy); frame.setRoi((int)ceil(drift.dist), (int)ceil(drift.dist), width, height); // see generateBackground frame = (FloatProcessor)frame.crop(); for(IntegratedGaussian mol : molecules) { mol.moveXY(dx, dy); if(mol.isOutOfRoi(frame.getRoi())) { // does the molecule get out of ROI due to the drift? deleteLater.add(mol); } else { mol.generate(frame); } } // remote the out-of-roi molecules for(IntegratedGaussian mol : deleteLater) { molecules.remove(mol); } deleteLater.clear(); // 2. read-out frame = ImageProcessor.add(frame, add_noise); // 3. gain frame = ImageProcessor.multiply(frame, mul_noise); return (ShortProcessor)frame.convertToShort(false); } }
package de.geeksfactory.opacclient.frontend;

import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.acra.ACRA;
import org.holoeverywhere.LayoutInflater;
import org.holoeverywhere.app.Activity;
import org.holoeverywhere.app.Fragment;
import org.holoeverywhere.widget.CheckBox;
import org.holoeverywhere.widget.EditText;
import org.holoeverywhere.widget.Spinner;
import org.json.JSONException;

import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageView;

import de.geeksfactory.opacclient.OpacClient;
import de.geeksfactory.opacclient.OpacTask;
import de.geeksfactory.opacclient.R;
import de.geeksfactory.opacclient.apis.OpacApi;
import de.geeksfactory.opacclient.frontend.OpacActivity.AccountSelectedListener;
import de.geeksfactory.opacclient.objects.Account;
import de.geeksfactory.opacclient.storage.MetaDataSource;
import de.geeksfactory.opacclient.storage.SQLMetaDataSource;

/**
 * Fragment showing the OPAC search form. Which input fields are visible depends on the
 * search capabilities ({@code fields}) advertised by the selected library's API and on
 * whether advanced mode is enabled.
 */
public class SearchFragment extends Fragment implements AccountSelectedListener {

    protected SharedPreferences sp;

    /** Implemented by the hosting activity to launch the barcode scanner. */
    public interface Callback {
        public void scanBarcode();
    }

    protected Callback mCallback;
    protected View view;
    protected OpacClient app;
    // last saved search query bundle; restored in onViewCreated
    protected Bundle savedState;
    // whether the advanced-search fields are shown
    protected boolean advanced = false;
    // search-field keys supported by the current library's API (OpacApi.KEY_SEARCH_QUERY_*)
    protected Set<String> fields;
    // spinner backing data: lists of {"key": ..., "value": ...} maps
    protected List<Map<String, String>> spinnerCategory_data;
    protected List<Map<String, String>> spinnerBranch_data;
    protected List<Map<String, String>> spinnerHomeBranch_data;
    // timestamp of the last metadata-load attempt
    protected long last_meta_try = 0;
    // true while library metadata (branches, categories) is being loaded
    public boolean metaDataLoading = false;
    protected LoadMetaDataTask lmdt;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        view = inflater.inflate(R.layout.fragment_search, container, false);
        setHasOptionsMenu(true);
        setRetainInstance(true);
        sp = ((OpacActivity) getActivity()).getDefaultSharedPreferences();
        app = (OpacClient) getActivity().getApplication();

        // if (getIntent().getBooleanExtra("barcode", false)) {
        // BarcodeScanIntegrator integrator = new BarcodeScanIntegrator(
        // SearchActivity.this);
        // integrator.initiateScan();
        // } else {
        // ArrayAdapter<CharSequence> order_adapter = ArrayAdapter
        // .createFromResource(this, R.array.orders,
        // R.layout.simple_spinner_item);
        // order_adapter
        // .setDropDownViewResource(R.layout.simple_spinner_dropdown_item);
        // ((Spinner) SearchActivity.this.findViewById(R.id.cbOrder))
        // .setAdapter(order_adapter);

        // tapping the barcode icon delegates scanning to the host activity
        ImageView ivBarcode = (ImageView) view.findViewById(R.id.ivBarcode);
        ivBarcode.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                mCallback.scanBarcode();
            }
        });
        return view;
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        // configure the form for the current library/account, then restore a saved query
        if (!(app.getLibrary() == null)) {
            accountSelected(app.getAccount());
        }
        if (savedInstanceState != null && savedInstanceState.containsKey("query")) {
            savedState = savedInstanceState.getBundle("query");
        }
        if (savedState != null)
            loadQuery(savedState);
    }

    /** Resets every search input to its empty/default state. */
    public void clear() {
        ((EditText) view.findViewById(R.id.etSimpleSearch)).setText("");
        ((EditText) view.findViewById(R.id.etTitel)).setText("");
        ((EditText) view.findViewById(R.id.etVerfasser)).setText("");
        ((EditText) view.findViewById(R.id.etSchlagA)).setText("");
        ((EditText) view.findViewById(R.id.etSchlagB)).setText("");
        ((EditText) view.findViewById(R.id.etBarcode)).setText("");
        ((EditText) view.findViewById(R.id.etISBN)).setText("");
        ((EditText) view.findViewById(R.id.etJahr)).setText("");
        ((EditText) view.findViewById(R.id.etJahrBis)).setText("");
        ((EditText) view.findViewById(R.id.etJahrVon)).setText("");
        ((EditText) view.findViewById(R.id.etSystematik)).setText("");
        ((EditText) view.findViewById(R.id.etInteressenkreis)).setText("");
        ((EditText) view.findViewById(R.id.etVerlag)).setText("");
        ((CheckBox) view.findViewById(R.id.cbDigital)).setChecked(false);
        ((CheckBox) view.findViewById(R.id.cbAvailable)).setChecked(false);
        ((Spinner) view.findViewById(R.id.cbBranch)).setSelection(0);
        ((Spinner) view.findViewById(R.id.cbHomeBranch)).setSelection(0);
        ((Spinner) view.findViewById(R.id.cbMediengruppe)).setSelection(0);
    }

    /**
     * Shows or hides each form control depending on the library API's supported search
     * fields and the advanced flag. Also shows a "library was replaced" banner at most
     * five times (counted via the "annoyed" preference).
     */
    protected void manageVisibility() {
        PackageManager pm = getActivity().getPackageManager();
        // "replaced by" banner with a Play-Store link, shown a limited number of times
        if (app.getLibrary().getReplacedBy() != null
                && sp.getInt("annoyed", 0) < 5) {
            view.findViewById(R.id.rlReplaced).setVisibility(View.VISIBLE);
            view.findViewById(R.id.ivReplacedStore).setOnClickListener(
                    new OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            try {
                                Intent i = new Intent(Intent.ACTION_VIEW, Uri
                                        .parse("market://details?id="
                                                + app.getLibrary()
                                                        .getReplacedBy()));
                                startActivity(i);
                            } catch (ActivityNotFoundException e) {
                                Log.i("play", "no market installed");
                            }
                        }
                    });
            sp.edit().putInt("annoyed", sp.getInt("annoyed", 0) + 1).commit();
        } else {
            view.findViewById(R.id.rlReplaced).setVisibility(View.GONE);
        }
        // free-text simple search
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_FREE)) {
            view.findViewById(R.id.tvSearchAdvHeader).setVisibility(
                    View.VISIBLE);
            view.findViewById(R.id.rlSimpleSearch).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.tvSearchAdvHeader).setVisibility(View.GONE);
            view.findViewById(R.id.rlSimpleSearch).setVisibility(View.GONE);
        }
        // title
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_TITLE)) {
            view.findViewById(R.id.etTitel).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvTitel).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.etTitel).setVisibility(View.GONE);
            view.findViewById(R.id.tvTitel).setVisibility(View.GONE);
        }
        // author
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_AUTHOR)) {
            view.findViewById(R.id.etVerfasser).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvVerfasser).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.etVerfasser).setVisibility(View.GONE);
            view.findViewById(R.id.tvVerfasser).setVisibility(View.GONE);
        }
        // keywords (advanced only)
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_KEYWORDA) && advanced) {
            view.findViewById(R.id.llSchlag).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvSchlag).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.llSchlag).setVisibility(View.GONE);
            view.findViewById(R.id.tvSchlag).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_KEYWORDB) && advanced) {
            view.findViewById(R.id.etSchlagB).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.etSchlagB).setVisibility(View.GONE);
        }
        // branch / home branch / media category spinners
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_BRANCH)) {
            view.findViewById(R.id.llBranch).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvZweigstelle).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.llBranch).setVisibility(View.GONE);
            view.findViewById(R.id.tvZweigstelle).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_HOME_BRANCH)) {
            view.findViewById(R.id.llHomeBranch).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvHomeBranch).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.llHomeBranch).setVisibility(View.GONE);
            view.findViewById(R.id.tvHomeBranch).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_CATEGORY)) {
            view.findViewById(R.id.llMediengruppe).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvMediengruppe).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.llMediengruppe).setVisibility(View.GONE);
            view.findViewById(R.id.tvMediengruppe).setVisibility(View.GONE);
        }
        // barcode field: visible when supported and either advanced mode is on
        // or the field already holds a value (e.g. from a scan)
        EditText etBarcode = (EditText) view.findViewById(R.id.etBarcode);
        String etBarcodeText = etBarcode.getText().toString();
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_BARCODE)
                && (advanced || !etBarcodeText.equals(""))) {
            etBarcode.setVisibility(View.VISIBLE);
        } else {
            etBarcode.setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_ISBN)) {
            view.findViewById(R.id.etISBN).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.etISBN).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_DIGITAL)) {
            view.findViewById(R.id.cbDigital).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.cbDigital).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_AVAILABLE)) {
            view.findViewById(R.id.cbAvailable).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.cbAvailable).setVisibility(View.GONE);
        }
        // barcode-scanner icon: only when a barcode-like search is possible AND a camera exists
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_ISBN)
                || (fields.contains(OpacApi.KEY_SEARCH_QUERY_BARCODE) && (advanced || !etBarcodeText
                        .equals("")))) {
            if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
                view.findViewById(R.id.ivBarcode).setVisibility(View.VISIBLE);
            } else {
                view.findViewById(R.id.ivBarcode).setVisibility(View.GONE);
            }
            view.findViewById(R.id.tvBarcodes).setVisibility(View.VISIBLE);
            view.findViewById(R.id.llBarcodes).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.tvBarcodes).setVisibility(View.GONE);
            view.findViewById(R.id.llBarcodes).setVisibility(View.GONE);
        }
        // publication year: range inputs take precedence over a single-year input
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_YEAR_RANGE_START)
                && fields.contains(OpacApi.KEY_SEARCH_QUERY_YEAR_RANGE_END)) {
            view.findViewById(R.id.llJahr).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvJahr).setVisibility(View.VISIBLE);
            view.findViewById(R.id.etJahr).setVisibility(View.GONE);
        } else if (fields.contains(OpacApi.KEY_SEARCH_QUERY_YEAR)) {
            view.findViewById(R.id.llJahr).setVisibility(View.GONE);
            view.findViewById(R.id.etJahr).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvJahr).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.llJahr).setVisibility(View.GONE);
            view.findViewById(R.id.tvJahr).setVisibility(View.GONE);
            view.findViewById(R.id.etJahr).setVisibility(View.GONE);
        }
        // classification / audience / publisher / ordering (advanced only)
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_SYSTEM) && advanced) {
            view.findViewById(R.id.etSystematik).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvSystematik).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.etSystematik).setVisibility(View.GONE);
            view.findViewById(R.id.tvSystematik).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_AUDIENCE) && advanced) {
            view.findViewById(R.id.etInteressenkreis).setVisibility(
                    View.VISIBLE);
            view.findViewById(R.id.tvInteressenkreis).setVisibility(
                    View.VISIBLE);
        } else {
            view.findViewById(R.id.etInteressenkreis).setVisibility(View.GONE);
            view.findViewById(R.id.tvInteressenkreis).setVisibility(View.GONE);
        }
        if (fields.contains(OpacApi.KEY_SEARCH_QUERY_PUBLISHER) && advanced) {
            view.findViewById(R.id.etVerlag).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvVerlag).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.etVerlag).setVisibility(View.GONE);
            view.findViewById(R.id.tvVerlag).setVisibility(View.GONE);
        }
        if (fields.contains("order") && advanced) {
            view.findViewById(R.id.cbOrder).setVisibility(View.VISIBLE);
            view.findViewById(R.id.tvOrder).setVisibility(View.VISIBLE);
        } else {
            view.findViewById(R.id.cbOrder).setVisibility(View.GONE);
            view.findViewById(R.id.tvOrder).setVisibility(View.GONE);
        }
    }

    /**
     * (Re)populates the branch, home-branch and media-category spinners from the stored
     * library metadata, preserving the user's previous selections where possible.
     * The home-branch default falls back to the per-account preference, then to the
     * library's configured "homebranch".
     */
    protected void fillComboBoxes() {
        Spinner cbZst = (Spinner) view.findViewById(R.id.cbBranch);
        Spinner cbZstHome = (Spinner) view.findViewById(R.id.cbHomeBranch);
        Spinner cbMg = (Spinner) view.findViewById(R.id.cbMediengruppe);

        // remember the currently selected keys before the data is replaced
        String zst_home_before = "";
        String zst_before = "";
        String mg_before = "";
        String selection;
        int selected = 0, i = 0;
        if (spinnerHomeBranch_data != null
                && spinnerHomeBranch_data.size() > 0
                && spinnerHomeBranch_data.size() > cbZstHome
                        .getSelectedItemPosition()
                && cbZstHome.getSelectedItemPosition() > 0) {
            zst_home_before = spinnerHomeBranch_data.get(
                    cbZstHome.getSelectedItemPosition()).get("key");
        }
        if (spinnerBranch_data != null
                && spinnerBranch_data.size() > cbZst.getSelectedItemPosition()
                && cbZst.getSelectedItemPosition() > 0) {
            zst_before = spinnerBranch_data
                    .get(cbZst.getSelectedItemPosition()).get("key");
        }
        if (spinnerCategory_data != null
                && spinnerCategory_data.size() > cbMg.getSelectedItemPosition()
                && cbMg.getSelectedItemPosition() > 0) {
            mg_before = spinnerCategory_data
                    .get(cbMg.getSelectedItemPosition()).get("key");
        }
        MetaDataSource data = new SQLMetaDataSource(app);
        try {
            data.open();
        } catch (Exception e1) {
            throw new RuntimeException(e1);
        }
        // first spinner entry is always an "all branches" pseudo-item with an empty key
        Map<String, String> all = new HashMap<String, String>();
        all.put("key", "");
        all.put("value", getString(R.string.all));
        spinnerBranch_data = data.getMeta(app.getLibrary().getIdent(),
                MetaDataSource.META_TYPE_BRANCH);
        spinnerBranch_data.add(0, all);
        cbZst.setAdapter(((OpacActivity) getActivity()).new MetaAdapter(
                getActivity(), spinnerBranch_data,
                R.layout.simple_spinner_item));
        if (!"".equals(zst_before)) {
            // restore the previous branch selection by key
            for (Map<String, String> row : spinnerBranch_data) {
                if (row.get("key").equals(zst_before)) {
                    selected = i;
                }
                i++;
            }
            cbZst.setSelection(selected);
        }
        spinnerHomeBranch_data = data.getMeta(app.getLibrary().getIdent(),
                MetaDataSource.META_TYPE_HOME_BRANCH);
        selected = 0;
        i = 0;
        if (!"".equals(zst_home_before)) {
            selection = zst_home_before;
        } else {
            // no previous selection: use the per-account preference, else the
            // library's configured default home branch
            if (sp.contains(OpacClient.PREF_HOME_BRANCH_PREFIX
                    + app.getAccount().getId()))
                selection = sp.getString(OpacClient.PREF_HOME_BRANCH_PREFIX
                        + app.getAccount().getId(), "");
            else {
                try {
                    selection = app.getLibrary().getData()
                            .getString("homebranch");
                } catch (JSONException e) {
                    selection = "";
                }
            }
        }
        for (Map<String, String> row : spinnerHomeBranch_data) {
            if (row.get("key").equals(selection)) {
                selected = i;
            }
            i++;
        }
        cbZstHome.setAdapter(((OpacActivity) getActivity()).new MetaAdapter(
                getActivity(), spinnerHomeBranch_data,
R.layout.simple_spinner_item)); cbZstHome.setSelection(selected); spinnerCategory_data = data.getMeta(app.getLibrary().getIdent(), MetaDataSource.META_TYPE_CATEGORY); spinnerCategory_data.add(0, all); cbMg.setAdapter(((OpacActivity) getActivity()).new MetaAdapter( getActivity(), spinnerCategory_data, R.layout.simple_spinner_item)); if (!"".equals(mg_before)) { selected = 0; i = 0; for (Map<String, String> row : spinnerBranch_data) { if (row.get("key").equals(zst_before)) { selected = i; } i++; } cbZst.setSelection(selected); } if ((spinnerBranch_data.size() == 1 || !fields .contains(OpacApi.KEY_SEARCH_QUERY_BRANCH)) && (spinnerCategory_data.size() == 1 || !fields .contains(OpacApi.KEY_SEARCH_QUERY_CATEGORY)) && (spinnerHomeBranch_data.size() == 0 || !fields .contains(OpacApi.KEY_SEARCH_QUERY_HOME_BRANCH))) { loadMetaData(app.getLibrary().getIdent(), true); loadingIndicators(); } data.close(); } protected void loadingIndicators() { int visibility = metaDataLoading ? View.VISIBLE : View.GONE; view.findViewById(R.id.pbBranch).setVisibility(visibility); view.findViewById(R.id.pbHomeBranch).setVisibility(visibility); view.findViewById(R.id.pbMediengruppe).setVisibility(visibility); } public void loadMetaData(String lib) { loadMetaData(lib, false); } public void loadMetaData(String lib, boolean force) { if (metaDataLoading) return; if (System.currentTimeMillis() - last_meta_try < 3600) { return; } last_meta_try = System.currentTimeMillis(); MetaDataSource data = new SQLMetaDataSource(getActivity()); try { data.open(); } catch (Exception e) { throw new RuntimeException(e); } boolean fetch = !data.hasMeta(lib); data.close(); if (fetch || force) { metaDataLoading = true; lmdt = new LoadMetaDataTask(); lmdt.execute(getActivity().getApplication(), lib); } } public class LoadMetaDataTask extends OpacTask<Boolean> { private boolean success = true; private long account; @Override protected Boolean doInBackground(Object... 
arg0) {
    // (continuation of LoadMetaDataTask.doInBackground, begun above)
    super.doInBackground(arg0);
    String lib = (String) arg0[1];
    // Remember which account the task was started for; checked again in
    // onPostExecute so a stale result is not applied after a switch.
    account = app.getAccount().getId();
    try {
        // NOTE(review): lib is compared against getLibrary(lib).getIdent();
        // presumably this decides between starting a fresh API instance for
        // another library and reusing the active one -- confirm against
        // OpacClient.getLibrary/getNewApi semantics.
        if (lib.equals(app.getLibrary(lib).getIdent())) {
            app.getNewApi(app.getLibrary(lib)).start();
        } else {
            app.getApi().start();
        }
        success = true;
    } catch (java.net.UnknownHostException e) {
        // Ordinary connectivity problems are expected; fail silently.
        success = false;
    } catch (java.net.SocketException e) {
        success = false;
    } catch (InterruptedIOException e) {
        success = false;
    } catch (Exception e) {
        // Anything else is unexpected -> report via ACRA crash reporting.
        ACRA.getErrorReporter().handleException(e);
        success = false;
    }
    return success;
}

@Override
protected void onPostExecute(Boolean result) {
    // The fragment may already be detached from its activity.
    if (getActivity() == null)
        return;
    // Apply the result only if the user did not switch accounts meanwhile.
    if (account == app.getAccount().getId()) {
        metaDataLoading = false;
        loadingIndicators();
        if (success)
            fillComboBoxes();
    }
}
}

@Override
public void accountSelected(Account account) {
    // A different account/library was chosen: re-read the supported search
    // fields and rebuild the entire form.
    metaDataLoading = false;
    advanced = sp.getBoolean("advanced", false);
    fields = new HashSet<String>(Arrays.asList(app.getApi()
            .getSearchFields()));
    manageVisibility();
    fillComboBoxes();
    loadingIndicators();
}

/** Starts a search with the current form content. */
public void go() {
    app.startSearch(getActivity(), saveQuery());
}

/**
 * Collects the current form content into an API query map. As a side effect,
 * the chosen home branch is persisted as the account's default.
 *
 * @return the query map, or null when the spinner data has not been loaded
 */
public Map<String, String> saveQuery() {
    String zst = "";
    String mg = "";
    String zst_home = "";
    if (spinnerBranch_data == null)
        return null;
    // NOTE(review): only spinnerBranch_data is null-checked; the accesses
    // below assume spinnerHomeBranch_data/spinnerCategory_data are loaded
    // together with it -- confirm.
    if (spinnerBranch_data.size() > 1)
        zst = spinnerBranch_data.get(
                ((Spinner) view.findViewById(R.id.cbBranch))
                        .getSelectedItemPosition()).get("key");
    if (spinnerHomeBranch_data.size() > 0) {
        zst_home = spinnerHomeBranch_data.get(
                ((Spinner) view.findViewById(R.id.cbHomeBranch))
                        .getSelectedItemPosition()).get("key");
        // Persist the home branch as the per-account default.
        sp.edit()
                .putString(
                        OpacClient.PREF_HOME_BRANCH_PREFIX
                                + app.getAccount().getId(), zst_home)
                .commit();
    }
    if (spinnerCategory_data.size() > 1)
        mg = spinnerCategory_data.get(
                ((Spinner) view.findViewById(R.id.cbMediengruppe))
                        .getSelectedItemPosition()).get("key");
    Map<String, String> query = new HashMap<String, String>();
    query.put(OpacApi.KEY_SEARCH_QUERY_FREE, ((EditText) view
            .findViewById(R.id.etSimpleSearch)).getEditableText()
            .toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_TITLE, ((EditText) view
            .findViewById(R.id.etTitel)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_AUTHOR, ((EditText) view
            .findViewById(R.id.etVerfasser)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_BRANCH, zst);
    query.put(OpacApi.KEY_SEARCH_QUERY_HOME_BRANCH, zst_home);
    query.put(OpacApi.KEY_SEARCH_QUERY_CATEGORY, mg);
    query.put(OpacApi.KEY_SEARCH_QUERY_ISBN, ((EditText) view
            .findViewById(R.id.etISBN)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_BARCODE, ((EditText) view
            .findViewById(R.id.etBarcode)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_YEAR, ((EditText) view
            .findViewById(R.id.etJahr)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_YEAR_RANGE_START, ((EditText) view
            .findViewById(R.id.etJahrVon)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_YEAR_RANGE_END, ((EditText) view
            .findViewById(R.id.etJahrBis)).getEditableText().toString());
    query.put(OpacApi.KEY_SEARCH_QUERY_DIGITAL, String
            .valueOf(((CheckBox) view.findViewById(R.id.cbDigital))
                    .isChecked()));
    query.put(OpacApi.KEY_SEARCH_QUERY_AVAILABLE, String
            .valueOf(((CheckBox) view.findViewById(R.id.cbAvailable))
                    .isChecked()));
    // Advanced-only fields are included only when advanced mode is active.
    if (advanced) {
        query.put(OpacApi.KEY_SEARCH_QUERY_KEYWORDA, ((EditText) view
                .findViewById(R.id.etSchlagA)).getEditableText().toString());
        query.put(OpacApi.KEY_SEARCH_QUERY_KEYWORDB, ((EditText) view
                .findViewById(R.id.etSchlagB)).getEditableText().toString());
        query.put(OpacApi.KEY_SEARCH_QUERY_SYSTEM, ((EditText) view
                .findViewById(R.id.etSystematik)).getEditableText()
                .toString());
        query.put(OpacApi.KEY_SEARCH_QUERY_AUDIENCE, ((EditText) view
                .findViewById(R.id.etInteressenkreis)).getEditableText()
                .toString());
        query.put(OpacApi.KEY_SEARCH_QUERY_PUBLISHER, ((EditText) view
                .findViewById(R.id.etVerlag)).getEditableText().toString());
        // Spinner positions are 0-based; the "order" values sent to the API
        // start at 1.
        query.put(
                "order",
                ((((Spinner) view.findViewById(R.id.cbOrder))
                        .getSelectedItemPosition()) + 1) + "");
    }
    return query;
}

/**
 * Restores a previously saved query (see saveQuery) into the form fields and
 * spinners.
 *
 * @param query the saved query, as produced by OpacClient.mapToBundle
 */
public void loadQuery(Bundle query) {
    ((EditText) view.findViewById(R.id.etSimpleSearch)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_FREE));
    ((EditText) view.findViewById(R.id.etTitel)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_TITLE));
    ((EditText) view.findViewById(R.id.etVerfasser)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_AUTHOR));
    ((EditText) view.findViewById(R.id.etISBN)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_ISBN));
    ((EditText) view.findViewById(R.id.etBarcode)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_BARCODE));
    ((EditText) view.findViewById(R.id.etJahr)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_YEAR));
    ((EditText) view.findViewById(R.id.etJahrVon)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_YEAR_RANGE_START));
    ((EditText) view.findViewById(R.id.etJahrBis)).setText(query
            .getString(OpacApi.KEY_SEARCH_QUERY_YEAR_RANGE_END));
    ((CheckBox) view.findViewById(R.id.cbDigital)).setChecked(query
            .getBoolean(OpacApi.KEY_SEARCH_QUERY_DIGITAL));
    ((CheckBox) view.findViewById(R.id.cbAvailable)).setChecked(query
            .getBoolean(OpacApi.KEY_SEARCH_QUERY_AVAILABLE));
    // Spinner selections are restored by looking up the stored key.
    Spinner spBranch = (Spinner) view.findViewById(R.id.cbBranch);
    int i = 0;
    for (Map<String, String> row : spinnerBranch_data) {
        if (row.get("key").equals(
                query.getString(OpacApi.KEY_SEARCH_QUERY_BRANCH))) {
            spBranch.setSelection(i);
            break;
        }
        i++;
    }
    Spinner spHomeBranch = (Spinner) view.findViewById(R.id.cbHomeBranch);
    i = 0;
    for (Map<String, String> row : spinnerHomeBranch_data) {
        if (row.get("key").equals(
                query.getString(OpacApi.KEY_SEARCH_QUERY_HOME_BRANCH))) {
            spHomeBranch.setSelection(i);
            break;
        }
        i++;
    }
    Spinner spCategory = (Spinner) view.findViewById(R.id.cbMediengruppe);
    i = 0;
    for (Map<String, String> row : spinnerCategory_data) {
        if (row.get("key").equals(
                query.getString(OpacApi.KEY_SEARCH_QUERY_CATEGORY))) {
            spCategory.setSelection(i);
            break;
        }
        i++;
    }
    if (advanced) {
        ((EditText) view.findViewById(R.id.etSchlagA)).setText(query
                .getString(OpacApi.KEY_SEARCH_QUERY_KEYWORDA));
        ((EditText) view.findViewById(R.id.etSchlagB)).setText(query
                .getString(OpacApi.KEY_SEARCH_QUERY_KEYWORDB));
        ((EditText) view.findViewById(R.id.etSystematik)).setText(query
                .getString(OpacApi.KEY_SEARCH_QUERY_SYSTEM));
        ((EditText) view.findViewById(R.id.etInteressenkreis))
                .setText(query.getString(OpacApi.KEY_SEARCH_QUERY_AUDIENCE));
        ((EditText) view.findViewById(R.id.etVerlag)).setText(query
                .getString(OpacApi.KEY_SEARCH_QUERY_PUBLISHER));
    }
}

@Override
public void onAttach(Activity activity) {
    super.onAttach(activity);
    // The hosting activity must implement the fragment's callback interface.
    try {
        mCallback = (Callback) activity;
    } catch (ClassCastException e) {
        throw new ClassCastException(activity.toString()
                + " must implement SearchFragment.Callback");
    }
}

@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    inflater.inflate(R.menu.fragment_search, menu);
    super.onCreateOptionsMenu(menu, inflater);
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == R.id.action_search_go) {
        go();
        return true;
    }
    return super.onOptionsItemSelected(item);
}

@Override
public void onSaveInstanceState(Bundle outState) {
    // Preserve the current query across configuration changes.
    savedState = OpacClient.mapToBundle(saveQuery());
    outState.putBundle("query", savedState);
    super.onSaveInstanceState(outState);
}
}
package de.mrapp.android.preference; import static de.mrapp.android.preference.util.Condition.ensureAtLeast; import static de.mrapp.android.preference.util.Condition.ensureNotNull; import android.annotation.TargetApi; import android.content.Context; import android.content.DialogInterface; import android.content.res.TypedArray; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import android.util.AttributeSet; import android.view.View; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.GridView; import de.mrapp.android.dialog.MaterialDialogBuilder; import de.mrapp.android.preference.adapter.ColorPaletteAdapter; /** * A preference, which allows to choose a color from a pre-defined color * palette. The chosen color will only be persisted, if confirmed by the user. * * @author Michael Rapp * * @since 1.4.0 */ public class ColorPalettePreference extends AbstractColorPickerPreference { /** * An array, which contains the colors, the preference allows to choose. */ private int[] colorPalette; /** * The size, which is used to preview colors in the preference's dialog. */ private int dialogPreviewSize; /** * The shape, which is used to preview colors in the preference's dialog. */ private PreviewShape dialogPreviewShape; /** * The border width, which is used to preview colors in the preference's * dialog. */ private int dialogPreviewBorderWidth; /** * The border color, which is used to preview colors in the preference's * dialog. */ private int dialogPreviewBorderColor; /** * The background, which is used to preview colors in the preference's * dialog. */ private Drawable dialogPreviewBackground; /** * The number of columns, which are used to preview colors in the * preference's dialog. */ private int numberOfColumns; /** * The adapter, which provides the colors for visualization using the * preference dialog's grid view. 
*/ private ColorPaletteAdapter adapter; /** * The grid view, which is used to show the preference's color palette. */ private GridView gridView; /** * Initializes the preference. * * @param attributeSet * The attribute set, which should be used to initialize the * preferences, as an instance of the type {@link AttributeSet} */ private void initialize(final AttributeSet attributeSet) { colorPalette = new int[0]; setNegativeButtonText(android.R.string.cancel); obtainStyledAttributes(attributeSet); } /** * Obtains all attributes from a specific attribute set. * * @param attributeSet * The attribute set, the attributes should be obtained from, as * an instance of the type {@link AttributeSet} */ private void obtainStyledAttributes(final AttributeSet attributeSet) { TypedArray typedArray = getContext().obtainStyledAttributes(attributeSet, R.styleable.ColorPalettePreference); try { obtainColorPalette(typedArray); obtainDialogPreviewSize(typedArray); obtainDialogPreviewShape(typedArray); obtainDialogPreviewBorderWidth(typedArray); obtainDialogPreviewBorderColor(typedArray); obtainDialogPreviewBackground(typedArray); obtainNumberOfColumns(typedArray); } finally { typedArray.recycle(); } } /** * Obtains the color palette from a specific typed array. * * @param typedArray * The typed array, the color palette, should be obtained from, * as an instance of the class {@link TypedArray} */ private void obtainColorPalette(final TypedArray typedArray) { int resourceId = typedArray.getResourceId(R.styleable.ColorPalettePreference_colorPalette, -1); if (resourceId != -1) { int[] obtainedColorPalette = getContext().getResources().getIntArray(resourceId); if (obtainedColorPalette != null) { setColorPalette(obtainedColorPalette); } } } /** * Obtains the size, which should be used to preview colors in the * preference's dialog, from a specific typed array. 
* * @param typedArray * The typed array, the size should be obtained from, as an * instance of the class {@link TypedArray} */ private void obtainDialogPreviewSize(final TypedArray typedArray) { int defaultValue = getContext().getResources() .getDimensionPixelSize(R.dimen.color_palette_preference_default_dialog_preview_size); setDialogPreviewSize( typedArray.getDimensionPixelSize(R.styleable.ColorPalettePreference_dialogPreviewSize, defaultValue)); } /** * Obtains the shape, which should be used to preview colors in the * preference's dialog, from a specific typed array. * * @param typedArray * The typed array, the shape should be obtained from, as an * instance of the class {@link TypedArray} */ private void obtainDialogPreviewShape(final TypedArray typedArray) { int defaultValue = getContext().getResources() .getInteger(R.integer.color_palette_preference_default_dialog_preview_shape); setDialogPreviewShape(PreviewShape .fromValue(typedArray.getInteger(R.styleable.ColorPalettePreference_dialogPreviewShape, defaultValue))); } /** * Obtains the border width, which should be used to preview colors in the * preference's dialog, from a specific typed array. * * @param typedArray * The typed array, the border width should be obtained from, as * an instance of the class {@link TypedArray} */ private void obtainDialogPreviewBorderWidth(final TypedArray typedArray) { int defaultValue = getContext().getResources() .getDimensionPixelSize(R.dimen.color_palette_preference_default_dialog_preview_border_width); setDialogPreviewBorderWidth(typedArray .getDimensionPixelSize(R.styleable.ColorPalettePreference_dialogPreviewBorderWidth, defaultValue)); } /** * Obtains the border color, which should be used to preview colors in the * preference's dialog, from a specific typed array. 
* * @param typedArray * The typed array, the border color should be obtained from, as * an instance of the class {@link TypedArray} */ private void obtainDialogPreviewBorderColor(final TypedArray typedArray) { int defaultValue = getContext().getResources() .getColor(R.color.color_palette_preference_default_dialog_preview_border_color); setDialogPreviewBorderColor( typedArray.getColor(R.styleable.ColorPalettePreference_dialogPreviewBorderColor, defaultValue)); } /** * Obtains the background, which should be used to preview colors in the * preference's dialog, from a specific typed array. * * @param typedArray * The typed array, the background should be obtained from, as an * instance of the class {@link TypedArray} */ @SuppressWarnings("deprecation") private void obtainDialogPreviewBackground(final TypedArray typedArray) { int backgroundColor = typedArray.getColor(R.styleable.ColorPalettePreference_dialogPreviewBackground, -1); if (backgroundColor != -1) { setPreviewBackgroundColor(backgroundColor); } else { int resourceId = typedArray.getResourceId(R.styleable.ColorPalettePreference_dialogPreviewBackground, R.drawable.color_picker_default_preview_background); setDialogPreviewBackground(getContext().getResources().getDrawable(resourceId)); } } /** * Obtains the number of columns, which should be used to preview colors in * the preference's dialog, from a specific typed array. * * @param typedArray * The typed array, the number of columns should be obtained * from, as an instance of the class {@link TypedArray} */ private void obtainNumberOfColumns(final TypedArray typedArray) { int defaultValue = getContext().getResources() .getInteger(R.integer.color_palette_preference_default_number_of_columns); setNumberOfColumns(typedArray.getInteger(R.styleable.ColorPalettePreference_android_numColumns, defaultValue)); } /** * Creates and returns a listener, which allows to close the preference's * dialog, if a color has been chosen by the user. 
     * @return The listener, which has been created, as an instance of the type
     *         {@link OnItemClickListener}
     */
    private OnItemClickListener createItemClickListener() {
        return new OnItemClickListener() {

            @Override
            public void onItemClick(final AdapterView<?> parent, final View view,
                    final int position, final long id) {
                // Tapping a color counts as confirming the dialog: forward a
                // positive-button click to the preference, then dismiss it.
                ColorPalettePreference.this.onClick(getDialog(),
                        DialogInterface.BUTTON_POSITIVE);
                getDialog().dismiss();
            }

        };
    }

    /**
     * Creates a new preference, which allows to choose a color from a
     * pre-defined color palette.
     *
     * @param context
     *            The context, which should be used by the preference, as an
     *            instance of the class {@link Context}
     */
    public ColorPalettePreference(final Context context) {
        this(context, null);
    }

    /**
     * Creates a new preference, which allows to choose a color from a
     * pre-defined color palette.
     *
     * @param context
     *            The context, which should be used by the preference, as an
     *            instance of the class {@link Context}
     * @param attributeSet
     *            The attributes of the XML tag that is inflating the
     *            preference, as an instance of the type {@link AttributeSet}
     */
    public ColorPalettePreference(final Context context, final AttributeSet attributeSet) {
        super(context, attributeSet);
        initialize(attributeSet);
    }

    /**
     * Creates a new preference, which allows to choose a color from a
     * pre-defined color palette.
     *
     * @param context
     *            The context, which should be used by the preference, as an
     *            instance of the class {@link Context}
     * @param attributeSet
     *            The attributes of the XML tag that is inflating the
     *            preference, as an instance of the type {@link AttributeSet}
     * @param defaultStyle
     *            The default style to apply to this preference. If 0, no style
     *            will be applied (beyond what is included in the theme). This
     *            may either be an attribute resource, whose value will be
     *            retrieved from the current theme, or an explicit style
     *            resource
     */
    public ColorPalettePreference(final Context context, final AttributeSet attributeSet,
            final int defaultStyle) {
        super(context, attributeSet, defaultStyle);
        initialize(attributeSet);
    }

    /**
     * Creates a new preference, which allows to choose a color from a
     * pre-defined color palette.
     *
     * @param context
     *            The context, which should be used by the preference, as an
     *            instance of the class {@link Context}
     * @param attributeSet
     *            The attributes of the XML tag that is inflating the
     *            preference, as an instance of the type {@link AttributeSet}
     * @param defaultStyle
     *            The default style to apply to this preference. If 0, no style
     *            will be applied (beyond what is included in the theme). This
     *            may either be an attribute resource, whose value will be
     *            retrieved from the current theme, or an explicit style
     *            resource
     * @param defaultStyleResource
     *            A resource identifier of a style resource that supplies
     *            default values for the preference, used only if the default
     *            style is 0 or can not be found in the theme. Can be 0 to not
     *            look for defaults
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public ColorPalettePreference(final Context context, final AttributeSet attributeSet,
            final int defaultStyle, final int defaultStyleResource) {
        super(context, attributeSet, defaultStyle, defaultStyleResource);
        initialize(attributeSet);
    }

    /**
     * Returns the colors, the preference allows to choose.
     *
     * NOTE(review): the internal array is returned directly; callers that
     * mutate it change the preference's palette. Confirm whether a defensive
     * copy should be returned instead.
     *
     * @return The colors, the preference allows to choose, as an
     *         {@link Integer} array
     */
    public final int[] getColorPalette() {
        return colorPalette;
    }

    /**
     * Sets the colors, the preference should allow to choose.
* * @param colorPalette * The colors, which should be set, as an {@link Integer} array */ public final void setColorPalette(final int[] colorPalette) { ensureNotNull(colorPalette, "The color palette may not be null"); this.colorPalette = colorPalette; } /** * Sets the colors, the preference should allow to choose. * * @param resourceId * The resource id of the color palette, which should be set, as * an {@link Integer} value. The resource id must correspond to a * valid integer array resource */ public final void setColorPalette(final int resourceId) { setColorPalette(getContext().getResources().getIntArray(resourceId)); } /** * Returns the size, which is used to preview colors in the preference's * dialog. * * @return The size, which is used to preview colors in the preference's * dialog, as an {@link Integer} value in pixels */ public final int getDialogPreviewSize() { return dialogPreviewSize; } /** * Sets the size, which should be used to preview colors in the preference's * dialog. * * @param previewSize * The size, which should be set, as an {@link Integer} value in * pixels. The size must be at least 1 */ public final void setDialogPreviewSize(final int previewSize) { ensureAtLeast(previewSize, 1, "The preview size must be at least 1"); this.dialogPreviewSize = previewSize; } /** * Returns the shape, which is used to preview colors in the preference's * dialog. * * @return The shape, which is used to preview colors in the preference's * dialog, as a value of the enum {@link PreviewShape}. The shape * may either be <code>CIRCLE</code> or <code>SQUARE</code> */ public final PreviewShape getDialogPreviewShape() { return dialogPreviewShape; } /** * Sets the shape, which should be used to preview colors in the * preference's dialog. * * @param previewShape * The shape, which should be set, as a value of the enum * {@link PreviewShape}. 
     *            The shape may not be null
     */
    public final void setDialogPreviewShape(final PreviewShape previewShape) {
        ensureNotNull(previewShape, "The preview shape may not be null");
        this.dialogPreviewShape = previewShape;
    }

    /**
     * Returns the border width, which is used to preview colors in the
     * preference's dialog.
     *
     * @return The border width, which is used to preview colors in the
     *         preference's dialog, as an {@link Integer} value in pixels
     */
    public final int getDialogPreviewBorderWidth() {
        return dialogPreviewBorderWidth;
    }

    /**
     * Sets the border width, which should be used to preview colors in the
     * preference's dialog.
     *
     * @param borderWidth
     *            The border width, which should be set, as an {@link Integer}
     *            value in pixels. The border width must be at least 0
     */
    public final void setDialogPreviewBorderWidth(final int borderWidth) {
        ensureAtLeast(borderWidth, 0, "The border width must be at least 0");
        this.dialogPreviewBorderWidth = borderWidth;
    }

    /**
     * Returns the border color, which is used to preview colors in the
     * preference's dialog.
     *
     * @return The border color, which is used to preview colors in the
     *         preference's dialog, as an {@link Integer} value
     */
    public final int getDialogPreviewBorderColor() {
        return dialogPreviewBorderColor;
    }

    /**
     * Sets the border color, which should be used to preview colors in the
     * preference's dialog.
     *
     * @param borderColor
     *            The border color, which should be set, as an {@link Integer}
     *            value
     */
    public final void setDialogPreviewBorderColor(final int borderColor) {
        this.dialogPreviewBorderColor = borderColor;
    }

    /**
     * Returns the background, which is used to preview colors in the
     * preference's dialog.
     *
     * @return The background, which is used to preview colors in the
     *         preference's dialog, as an instance of the class {@link Drawable}
     */
    public final Drawable getDialogPreviewBackground() {
        return dialogPreviewBackground;
    }

    /**
     * Sets the background, which should be used to preview colors in the
     * preference's dialog.
     *
     * @param background
     *            The background, which should be set, as an instance of the
     *            class {@link Drawable} or null, if no background should be
     *            shown
     */
    public final void setDialogPreviewBackground(final Drawable background) {
        this.dialogPreviewBackground = background;
    }

    /**
     * Sets the background, which should be used to preview colors in the
     * preference's dialog.
     *
     * @param resourceId
     *            The resource id of the background, which should be set, as an
     *            {@link Integer} value. The resource id must correspond to a
     *            valid drawable resource
     */
    @SuppressWarnings("deprecation")
    public final void setDialogPreviewBackground(final int resourceId) {
        setDialogPreviewBackground(getContext().getResources().getDrawable(resourceId));
    }

    /**
     * Sets the background color, which should be used to preview colors in the
     * preference's dialog.
     *
     * @param color
     *            The background color, which should be set, as an
     *            {@link Integer} value
     */
    public final void setDialogPreviewBackgroundColor(final int color) {
        setDialogPreviewBackground(new ColorDrawable(color));
    }

    /**
     * Returns the number of columns, which are used to preview colors in the
     * preference's dialog.
     *
     * @return The number of columns, which are used to preview colors in the
     *         preference's dialog, as an {@link Integer} value
     */
    public final int getNumberOfColumns() {
        return numberOfColumns;
    }

    /**
     * Sets the number of columns, which should be used to preview colors in the
     * preference's dialog.
     *
     * @param numberOfColumns
     *            The number of columns, which should be set, as an
     *            {@link Integer} value.
The number of columns must be at least * 1 */ public final void setNumberOfColumns(final int numberOfColumns) { ensureAtLeast(numberOfColumns, 1, "The number of columns must be at least 1"); this.numberOfColumns = numberOfColumns; } @Override protected final boolean needInputMethod() { return false; } @Override protected final void onPrepareDialog(final MaterialDialogBuilder dialogBuilder) { adapter = new ColorPaletteAdapter(getContext(), getColorPalette(), getDialogPreviewSize(), getDialogPreviewShape(), getDialogPreviewBorderWidth(), getDialogPreviewBorderColor(), getDialogPreviewBackground()); int selectedIndex = adapter.indexOf(getColor()); gridView = (GridView) View.inflate(getContext(), R.layout.color_palette, null); gridView.setNumColumns(getNumberOfColumns()); gridView.setChoiceMode(GridView.CHOICE_MODE_SINGLE); gridView.setOnItemClickListener(createItemClickListener()); gridView.setAdapter(adapter); gridView.setItemChecked(selectedIndex, true); gridView.setSelection(selectedIndex); dialogBuilder.setView(gridView); } @Override protected final void onDialogClosed(final boolean positiveResult) { if (positiveResult) { int selectedIndex = gridView.getCheckedItemPosition(); int newValue = adapter.getItem(selectedIndex); if (callChangeListener(newValue)) { setColor(newValue); } } gridView = null; adapter = null; } }
package dk.netarkivet.harvester.scheduler;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.jms.JMSException;
import javax.jms.QueueBrowser;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import dk.netarkivet.common.distribute.JMSConnection;
import dk.netarkivet.common.distribute.JMSConnectionFactory;
import dk.netarkivet.common.exceptions.ArgumentNotValid;
import dk.netarkivet.common.exceptions.IOFailure;
import dk.netarkivet.common.lifecycle.LifeCycleComponent;
import dk.netarkivet.common.utils.ExceptionUtils;
import dk.netarkivet.common.utils.Settings;
import dk.netarkivet.harvester.HarvesterSettings;
import dk.netarkivet.harvester.datamodel.Job;
import dk.netarkivet.harvester.datamodel.JobDAO;
import dk.netarkivet.harvester.datamodel.JobPriority;
import dk.netarkivet.harvester.datamodel.JobStatus;
import dk.netarkivet.harvester.harvesting.HeritrixLauncher;
import dk.netarkivet.harvester.harvesting.distribute.DoOneCrawlMessage;
import dk.netarkivet.harvester.harvesting.distribute.JobChannelUtil;
import dk.netarkivet.harvester.harvesting.distribute.MetadataEntry;

/**
 * This class handles dispatching of scheduled Harvest jobs to the Harvest
 * servers.<p>
 * The scheduler loads all active harvest definitions on a regular basis and
 * extracts the scheduling information for each definition.
 * When a harvest definition is scheduled to start the scheduler
 * creates the corresponding harvest jobs and submits these
 * to the active HarvestServers.<p>
 *
 * It also handles backup and makes sure backup is not performed while
 * jobs are being scheduled.<p>
 *
 * Note: Only one <code>HarvestScheduler</code> should be running at a time.
 */
public class HarvestScheduler extends LifeCycleComponent {

    /** The logger to use. */
    protected static final Log log = LogFactory.getLog(
            HarvestScheduler.class.getName());

    /** The thread used to control when new dispatches should be run. */
    private Thread dispatcherThread;

    /** Connection to JMS provider. */
    private JMSConnection jmsConnection;

    /**
     * Used for storing a map of the <code>QueueBrowsers</code> used by the
     * <code>HarvestScheduler</code>, so they don't need to be created over
     * and over (see Bug 2059).
     */
    private Map<JobPriority, QueueBrowser> queueBrowsers;

    /**
     * Create new instance of the HarvestScheduler.
     */
    public HarvestScheduler() {
        log.info("Creating HarvestScheduler");
        jmsConnection = JMSConnectionFactory.getInstance();
    }

    /**
     * Start the thread responsible for reading Harvest definitions from the
     * database, and dispatching the harvest job to the servers.
     */
    public void start() {
        //ToDo implement real scheduling with timeout functionality.
        dispatcherThread = new Thread("HarvestScheduler") {
            public void run() {
                log.debug("Rescheduling any leftover jobs");
                rescheduleSubmittedJobs();
                int dispatchPeriod =
                        Settings.getInt(HarvesterSettings.DISPATCH_JOBS_PERIOD);
                log.info("Scheduling dispatch every "
                        + (dispatchPeriod / 1000) + " seconds");
                try {
                    // Check the CURRENT thread's interrupt flag rather than
                    // the dispatcherThread field: shutdown() interrupts and
                    // then nulls that field, so reading it here could throw
                    // a NullPointerException while shutting down.
                    while (!Thread.currentThread().isInterrupted()) {
                        try {
                            dispatchJobs();
                        } catch (Exception e) {
                            // Keep dispatching on the next cycle even if one
                            // round fails.
                            log.error("Unable to dispatch new harvest jobs", e);
                        }
                        Thread.sleep(dispatchPeriod);
                    }
                } catch (InterruptedException e) {
                    log.info("HarvestJobDispatcher interrupted, "
                            + e.getMessage());
                }
            }
        };
        dispatcherThread.start();
    }

    /**
     * Reschedule all jobs with JobStatus SUBMITTED. Each submitted job is
     * resubmitted as a new job via {@link JobDAO#rescheduleJob(long)}.
     */
    private void rescheduleSubmittedJobs() {
        final JobDAO dao = JobDAO.getInstance();
        final Iterator<Long> jobs = dao.getAllJobIds(JobStatus.SUBMITTED);
        int resubmitCount = 0;
        while (jobs.hasNext()) {
            long oldID = jobs.next();
            long newID = dao.rescheduleJob(oldID);
            log.info("Resubmitting old job " + oldID + " as " + newID);
            resubmitCount++;
        }
        log.info(resubmitCount + " jobs have been resubmitted.");
    }

    /**
     * Stop any job that has been in status STARTED a very long time defined
     * by the HarvesterSettings.JOB_TIMEOUT_TIME setting.
     */
    private void stopTimeoutJobs() {
        final JobDAO dao = JobDAO.getInstance();
        final Iterator<Long> jobs = dao.getAllJobIds(JobStatus.STARTED);
        // The timeout setting is loop-invariant; read it once.
        final long timeoutSeconds =
                Settings.getLong(HarvesterSettings.JOB_TIMEOUT_TIME);
        final long timeDiff = timeoutSeconds * 1000;
        int stoppedJobs = 0;
        while (jobs.hasNext()) {
            long id = jobs.next();
            Job job = dao.read(id);
            Date endTime = new Date();
            endTime.setTime(job.getActualStart().getTime() + timeDiff);
            if (new Date().after(endTime)) {
                final String msg = " Job " + id
                        + " has exceeded its timeout of "
                        + (timeoutSeconds / 60) + " minutes."
                        + " Changing status to " + "FAILED.";
                log.warn(msg);
                job.setStatus(JobStatus.FAILED);
                job.appendHarvestErrors(msg);
                dao.update(job);
                stoppedJobs++;
            }
        }
        if (stoppedJobs > 0) {
            log.warn("Changed " + stoppedJobs
                    + " jobs from STARTED to FAILED");
        }
    }

    /**
     * Dispatches new jobs.
     * Stops jobs with status STARTED, which have been running for more
     * than settings.harvester.scheduler.jobtimeouttime time.
     */
    void dispatchJobs() {
        stopTimeoutJobs();
        submitNewJobs();
    }

    /**
     * Submit the next new job if the relevant message queue is empty.
     * One queue exists per {@link JobPriority}; a priority is skipped when
     * its queue still contains unconsumed messages.
     */
    synchronized void submitNewJobs() {
        try {
            for (JobPriority priority : JobPriority.values()) {
                if (isQueueEmpty(priority)) {
                    submitNextNewJob(priority);
                } else {
                    if (log.isTraceEnabled()) {
                        log.trace("Skipping dispatching of " + priority
                                + " jobs, the message queue is not empty");
                    }
                }
            }
        } catch (JMSException e) {
            log.error("Unable to determine whether message queue is empty", e);
        }
    }

    /**
     * Submit the next new job (the one with the lowest ID) with the given
     * priority. If submission fails the job is marked FAILED with the error
     * details appended.
     *
     * @param priority the job priority queue to submit to.
     */
    private void submitNextNewJob(JobPriority priority) {
        final JobDAO dao = JobDAO.getInstance();
        Iterator<Long> jobsToSubmit = dao.getAllJobIds(JobStatus.NEW, priority);
        if (!jobsToSubmit.hasNext()) {
            if (log.isTraceEnabled()) {
                log.trace("No " + priority + " jobs to be run at this time");
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Submitting new " + priority + " job");
            }
            final long jobID = jobsToSubmit.next();
            Job jobToSubmit = null;
            try {
                jobToSubmit = dao.read(jobID);
                jobToSubmit.setStatus(JobStatus.SUBMITTED);
                jobToSubmit.setSubmittedDate(new Date());
                dao.update(jobToSubmit);
                //Add alias metadata
                List<MetadataEntry> metadata = new ArrayList<MetadataEntry>();
                MetadataEntry aliasMetadataEntry =
                        MetadataEntry.makeAliasMetadataEntry(
                                jobToSubmit.getJobAliasInfo(),
                                jobToSubmit.getOrigHarvestDefinitionID(),
                                jobToSubmit.getHarvestNum(),
                                jobToSubmit.getJobID());
                if (aliasMetadataEntry != null) {
                    metadata.add(aliasMetadataEntry);
                }
                //Add duplicationReduction MetadataEntry, if Deduplication
                //is enabled.
                if (HeritrixLauncher.isDeduplicationEnabledInTemplate(
                        jobToSubmit.getOrderXMLdoc())) {
                    MetadataEntry duplicateReductionMetadataEntry =
                            MetadataEntry.makeDuplicateReductionMetadataEntry(
                                    dao.getJobIDsForDuplicateReduction(jobID),
                                    jobToSubmit.getOrigHarvestDefinitionID(),
                                    jobToSubmit.getHarvestNum(),
                                    jobToSubmit.getJobID());
                    if (duplicateReductionMetadataEntry != null) {
                        metadata.add(duplicateReductionMetadataEntry);
                    }
                }
                doOneCrawl(jobToSubmit, metadata);
                if (log.isTraceEnabled()) {
                    log.trace("Job " + jobToSubmit
                            + " sent to harvest queue.");
                }
            } catch (Throwable e) {
                String message = "Error while scheduling job " + jobID;
                log.warn(message, e);
                if (jobToSubmit != null) {
                    // Record the failure on the job itself so it is visible
                    // in the job status, not only in the log.
                    jobToSubmit.setStatus(JobStatus.FAILED);
                    jobToSubmit.appendHarvestErrors(message);
                    jobToSubmit.appendHarvestErrorDetails(
                            ExceptionUtils.getStackTrace(e));
                    dao.update(jobToSubmit);
                }
            }
        }
    }

    /**
     * Checks that the message queue for the given harvest job is empty and
     * therefore ready for the next message.
     * @param priority The job priority used for the channel of the queue
     * @return Is the queue empty
     * @throws JMSException Unable to retrieve queue information
     */
    private boolean isQueueEmpty(JobPriority priority) throws JMSException {
        if (queueBrowsers == null) {
            createQueueBrowsers();
        }
        QueueBrowser qBrowser = queueBrowsers.get(priority);
        try {
            return !qBrowser.getEnumeration().hasMoreElements();
        } catch (JMSException e) {
            // The JMS session may have gone stale; rebuild the browsers
            // once and retry before giving up.
            log.warn("Failed to check if queues were empty, trying to "
                    + "reestablish session and queue browsers ", e);
            createQueueBrowsers();
            qBrowser = queueBrowsers.get(priority);
            return !qBrowser.getEnumeration().hasMoreElements();
        }
    }

    /**
     * (Re)creates one QueueBrowser per job priority, caching them in
     * {@link #queueBrowsers} so they are not created on every poll.
     * @throws JMSException if a browser cannot be created.
     */
    private void createQueueBrowsers() throws JMSException {
        queueBrowsers = new HashMap<JobPriority, QueueBrowser>();
        for (JobPriority priority : JobPriority.values()) {
            log.debug("Creating QueueBrowser for " + priority + " jobs");
            queueBrowsers.put(priority, jmsConnection.createQueueBrowser(
                    JobChannelUtil.getChannel(priority)));
        }
    }

    /**
     * Submit an doOneCrawl request to a HarvestControllerServer with correct
     * priority.
     * @param job the specific job to send
     * @param metadata pre-harvest metadata to store in arcfile.
     * @throws ArgumentNotValid one of the parameters are null
     * @throws IOFailure if unable to send the doOneCrawl request to a
     * harvestControllerServer
     */
    public void doOneCrawl(Job job, List<MetadataEntry> metadata)
            throws ArgumentNotValid, IOFailure {
        ArgumentNotValid.checkNotNull(job, "job");
        ArgumentNotValid.checkNotNull(metadata, "metadata");
        DoOneCrawlMessage nMsg = new DoOneCrawlMessage(job,
                JobChannelUtil.getChannel(job.getPriority()), metadata);
        log.debug("Send crawl request: " + nMsg);
        jmsConnection.send(nMsg);
    }

    /**
     * Release allocated resources (JMS connections) and stops dispatching
     * harvest jobs, all without logging.
     */
    @Override
    public void shutdown() {
        log.debug("HarvestScheduler closing down.");
        if (dispatcherThread != null) {
            dispatcherThread.interrupt();
            dispatcherThread = null;
        }
        jmsConnection = null;
    }
}
package dr.evolution.tree.treemetrics;

import dr.evolution.tree.Clade;
import dr.evolution.tree.Tree;

import java.util.*;

import static dr.evolution.tree.treemetrics.TreeMetric.Utils.checkTreeTaxa;

/**
 * Robinson-Foulds tree distance: the number of clades present in the first
 * tree but absent from the second.
 *
 * @author Andrew Rambaut
 * @version $Id$
 */
public class RobinsonFouldsMetric implements TreeMetric {

    // final: a mutable public static field could be reassigned by any
    // caller, silently changing the reported metric type.
    public static final Type TYPE = Type.ROBINSON_FOULDS;

    public RobinsonFouldsMetric() {
    }

    /**
     * Computes the metric between two trees over the same taxon set.
     *
     * @param tree1 first tree
     * @param tree2 second tree
     * @return the number of clades of tree1 that do not occur in tree2
     */
    @Override
    public double getMetric(Tree tree1, Tree tree2) {
        // Both trees must share the same taxa for clade comparison to be
        // meaningful; throws otherwise.
        checkTreeTaxa(tree1, tree2);

        Set<Clade> clades1 = Clade.getCladeSet(tree1);
        Set<Clade> clades2 = Clade.getCladeSet(tree2);

        clades1.removeAll(clades2);

        // Technically RF would be twice this because it doesn't assume
        // the same set of tips in both trees (so may have a different
        // number of clades missing from each).
        return clades1.size();
    }

    @Override
    public Type getType() {
        return TYPE;
    }

    @Override
    public String toString() {
        return getType().getShortName();
    }

    // todo - add in Citable:
    // Robinson, D. R.; Foulds, L. R. (1981). "Comparison of phylogenetic trees". Mathematical Biosciences. 53: 131-147. doi:10.1016/0025-5564(81)90043-2.
}
package dr.evomodelxml.coalescent;

import dr.evolution.tree.Tree;
import dr.evolution.tree.TreeUtils;
import dr.evolution.util.Taxa;
import dr.evolution.util.TaxonList;
import dr.evomodel.coalescent.*;
import dr.evomodel.tree.TreeModel;
import dr.xml.*;

import java.util.ArrayList;
import java.util.List;

/**
 * MultiTreeIntervalsParser
 *
 * XML parser for the {@code multiTreeIntervals} element: collects one or
 * more trees (plus optional singleton taxa) into a MultiTreeIntervals
 * object.
 *
 * @author Andrew Rambaut
 */
public class MultiTreeIntervalsParser extends AbstractXMLObjectParser {

    // Element and attribute names recognised by this parser.
    public static final String MULTI_TREE_INTERVALS = "multiTreeIntervals";
    public static final String TREES = "trees";
    public static final String SINGLETONS = "singletons";
    public static final String INCLUDE_STEMS = "includeStems";
    public static final String CUTOFF = "cutoff";

    public String getParserName() {
        return MULTI_TREE_INTERVALS;
    }

    /**
     * Builds a MultiTreeIntervals from the XML element: all Tree children
     * of the nested {@code trees} element, an optional {@code singletons}
     * taxon list, and an {@code includeStems} flag which, when true,
     * requires a {@code cutoff} time attribute.
     *
     * @throws XMLParseException if includeStems is set without a cutoff
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
        XMLObject cxo = xo.getChild(TREES);
        List<Tree> trees = new ArrayList<Tree>(cxo.getAllChildren(Tree.class));

        // Singleton taxa are optional; null means "none".
        Taxa singletonTaxa = null;
        if (xo.hasChildNamed(SINGLETONS)) {
            singletonTaxa = (Taxa) xo.getElementFirstChild(SINGLETONS);
        }

        boolean includeStems = xo.getBooleanAttribute(INCLUDE_STEMS);
        double cutoffTime = 0.0;
        if (includeStems) {
            // A cutoff is only meaningful (and only required) when stems
            // are included.
            if (!xo.hasAttribute(CUTOFF)) {
                throw new XMLParseException("MultiTreeIntervals needs a cutoff time if it is to include stems");
            }
            cutoffTime = xo.getDoubleAttribute(CUTOFF);
        }

        return new MultiTreeIntervals(trees, singletonTaxa, includeStems, cutoffTime);
    }
package dr.math.distributions; import dr.math.ErrorFunction; import org.apache.commons.math.MathException; import org.apache.commons.math.special.Beta; import dr.math.*; /** * @author Trevor Bedford * @version $Id$ */ public class NegativeBinomialDistribution implements Distribution { double mean; double stdev; public NegativeBinomialDistribution(double mean, double stdev) { this.mean = mean; this.stdev = stdev; } public double pdf(double x) { if (x < 0) return 0; return Math.exp(logPdf(x)); } public double logPdf(double x) { if (x < 0) return Double.NEGATIVE_INFINITY; double r = -1 * (mean*mean) / (mean - stdev*stdev); double p = mean / (stdev*stdev); return Math.log(Math.pow(1-p,x)) + Math.log(Math.pow(p, r)) + GammaFunction.lnGamma(r+x) - GammaFunction.lnGamma(r) - GammaFunction.lnGamma(x+1); } public double cdf(double x) { double r = -1 * (mean*mean) / (mean - stdev*stdev); double p = mean / (stdev*stdev); try { return Beta.regularizedBeta(p, r, x+1); } catch (MathException e) { // AR - throwing exceptions deep in numerical code causes trouble. Catching runtime // exceptions is bad. Better to return NaN and let the calling code deal with it. 
return Double.NaN; // "Couldn't calculate beta cdf for alpha = " + alpha + ", beta = " + beta + ": " +e.getMessage()); } } public double quantile(double y) { // TB - I'm having trouble implementing this // LM - A first stab using simple minimisation to invert the function (under absolute loss) // Implementation based on the qnbinom.c function used in R final double r = -1 * (mean*mean) / (mean - stdev*stdev); final double p = mean / (stdev*stdev); final double prob = y; final double Q = 1.0 / p; final double P = (1.0 - p) * Q; final double gamma = (Q + P)/stdev; final double z = Math.sqrt(2.0) * ErrorFunction.inverseErf(2.0 * y - 1.0); final double crudeY = mean + stdev * (z + gamma * (z*z - 1) / 6); UnivariateFunction f = new UnivariateFunction() { double tent = Double.NaN; public double evaluate(final double argument) { try { tent = Beta.regularizedBeta(p, r, argument+1); } catch (MathException e) { return Double.NaN; } double score = Math.abs(tent-prob); return score; } public int getNumArguments() { return 1; } public double getLowerBound() { // 20% window should cut it. Probably too large even... return Math.min(crudeY - .2*crudeY, 0); } public double getUpperBound() { return crudeY + .2*crudeY; } }; UnivariateMinimum minimum = new UnivariateMinimum(); double q = minimum.findMinimum(f); return Math.ceil(q); } public double mean() { return mean; } public double variance() { return stdev*stdev; } public UnivariateFunction getProbabilityDensityFunction() { throw new RuntimeException(); } public static void main(String[] args) { System.out.println("Test negative binomial"); System.out.println("Mean 5, sd 5, x 5, pdf 0.074487, logPdf -2.59713, median 4"); NegativeBinomialDistribution dist = new NegativeBinomialDistribution(5, 5); System.out.println("pdf = " + dist.pdf(5)); System.out.println("quantile(0.5) aka median = " + dist.quantile(0.5)); System.out.println("logPdf = " + dist.logPdf(5)); } }
package uk.org.taverna.scufl2.translator.t2flow; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationTargetException; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import org.apache.log4j.Logger; import org.w3c.dom.Document; import uk.org.taverna.scufl2.api.activity.ActivityType; import uk.org.taverna.scufl2.api.activity.InputActivityPort; import uk.org.taverna.scufl2.api.activity.OutputActivityPort; import uk.org.taverna.scufl2.api.bindings.Bindings; import uk.org.taverna.scufl2.api.bindings.ProcessorBinding; import uk.org.taverna.scufl2.api.bindings.ProcessorInputPortBinding; import uk.org.taverna.scufl2.api.bindings.ProcessorOutputPortBinding; import uk.org.taverna.scufl2.api.common.ConfigurableProperty; import uk.org.taverna.scufl2.api.common.Named; import uk.org.taverna.scufl2.api.common.ToBeDecided; import uk.org.taverna.scufl2.api.configurations.ConfigurablePropertyConfiguration; import uk.org.taverna.scufl2.api.configurations.Configuration; import uk.org.taverna.scufl2.api.container.TavernaResearchObject; import uk.org.taverna.scufl2.api.core.DataLink; import uk.org.taverna.scufl2.api.core.IterationStrategy; import uk.org.taverna.scufl2.api.core.Processor; import uk.org.taverna.scufl2.api.core.Workflow; import uk.org.taverna.scufl2.api.port.InputProcessorPort; import uk.org.taverna.scufl2.api.port.InputWorkflowPort; import uk.org.taverna.scufl2.api.port.OutputProcessorPort; import uk.org.taverna.scufl2.api.port.OutputWorkflowPort; import uk.org.taverna.scufl2.api.port.ReceiverPort; import 
uk.org.taverna.scufl2.api.port.SenderPort; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Activity; import uk.org.taverna.scufl2.xml.t2flow.jaxb.AnnotatedGranularDepthPort; import uk.org.taverna.scufl2.xml.t2flow.jaxb.AnnotatedGranularDepthPorts; import uk.org.taverna.scufl2.xml.t2flow.jaxb.ConfigBean; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Dataflow; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Datalinks; import uk.org.taverna.scufl2.xml.t2flow.jaxb.DepthPort; import uk.org.taverna.scufl2.xml.t2flow.jaxb.DepthPorts; import uk.org.taverna.scufl2.xml.t2flow.jaxb.DispatchStack; import uk.org.taverna.scufl2.xml.t2flow.jaxb.GranularDepthPort; import uk.org.taverna.scufl2.xml.t2flow.jaxb.GranularDepthPorts; import uk.org.taverna.scufl2.xml.t2flow.jaxb.IterationStrategyStack; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Link; import uk.org.taverna.scufl2.xml.t2flow.jaxb.LinkType; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Map; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Mapping; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Port; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Ports; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Processors; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Raven; import uk.org.taverna.scufl2.xml.t2flow.jaxb.Role; @SuppressWarnings("restriction") public class T2FlowParser { public static <T extends Named> T findNamed(Collection<T> namedObjects, String name) { for (T named : namedObjects) { if (named.getName().equals(name)) { return named; } } return null; } protected ThreadLocal<uk.org.taverna.scufl2.api.activity.Activity> currentActivity = new ThreadLocal<uk.org.taverna.scufl2.api.activity.Activity>(); protected ThreadLocal<Bindings> currentBindings = new ThreadLocal<Bindings>(); protected ThreadLocal<Processor> currentProcessor = new ThreadLocal<Processor>(); protected ThreadLocal<ProcessorBinding> currentProcessorBinding = new ThreadLocal<ProcessorBinding>(); // Currently parsing protected ThreadLocal<TavernaResearchObject> 
currentResearchObject = new ThreadLocal<TavernaResearchObject>(); protected ThreadLocal<Workflow> currentWorkflow = new ThreadLocal<Workflow>(); private JAXBContext jc; private Logger logger = Logger.getLogger(T2FlowParser.class); private boolean strict = true; private Unmarshaller unmarshaller; public T2FlowParser() throws JAXBException { jc = JAXBContext.newInstance("uk.org.taverna.scufl2.xml.t2flow.jaxb", getClass().getClassLoader()); unmarshaller = jc.createUnmarshaller(); } protected ReceiverPort findReceiverPort(Workflow wf, Link sink) throws ParseException { if (sink.getType().equals(LinkType.DATAFLOW)) { String portName = sink.getPort(); OutputWorkflowPort candidate = wf.getOutputPorts().getByName(portName); if (candidate == null) { throw new ParseException("Link to unknown workflow port " + portName); } return candidate; } else if (sink.getType().equals(LinkType.PROCESSOR)) { String processorName = sink.getProcessor(); Processor processor = wf.getProcessors().getByName(processorName); if (processor == null) { throw new ParseException("Link to unknown processor " + processorName); } String portName = sink.getPort(); InputProcessorPort candidate = processor.getInputPorts().getByName(portName); if (candidate == null) { throw new ParseException("Link to unknown port " + portName + " in " + processorName); } return candidate; } else if (sink.getType().equals(LinkType.MERGE)) { throw new ParseException( "Translation of merges not yet implemented"); } throw new ParseException("Could not parse receiver " + sink); } protected SenderPort findSenderPort(Workflow wf, Link source) throws ParseException { if (source.getType().equals(LinkType.DATAFLOW)) { String portName = source.getPort(); InputWorkflowPort candidate = wf.getInputPorts().getByName(portName); if (candidate == null) { throw new ParseException("Link from unknown workflow port " + portName); } return candidate; } else if (source.getType().equals(LinkType.PROCESSOR)) { String processorName = 
source.getProcessor(); Processor processor = wf.getProcessors().getByName(processorName); if (processor == null) { throw new ParseException("Link from unknown processor " + processorName); } String portName = source.getPort(); OutputProcessorPort candidate = processor.getOutputPorts().getByName(portName); if (candidate == null) { throw new ParseException("Link from unknown port " + portName + " in " + processorName); } return candidate; } else if (source.getType().equals(LinkType.MERGE)) { throw new ParseException( "Translation of merges not yet implemented"); } throw new ParseException("Could not parse sender " + source); } public boolean isStrict() { return strict; } protected void makeDefaultBindings( uk.org.taverna.scufl2.xml.t2flow.jaxb.Workflow wf) { Bindings bindings = new Bindings(wf.getProducedBy()); currentResearchObject.get().getBindings().add(bindings); currentBindings.set(bindings); } protected URI mapActivityFromRaven(Raven raven, String activityClass) { URI ravenURI = URI .create("http://ns.taverna.org.uk/2010/activity/raven/"); // TODO: Perform actual mapping return ravenURI.resolve(raven.getGroup() + "/" + raven.getArtifact() + "/" + raven.getVersion() + "/" + activityClass); } protected uk.org.taverna.scufl2.api.activity.Activity parseActivity( Activity origActivity) { Raven raven = origActivity.getRaven(); String activityClass = origActivity.getClazz(); URI activityId = mapActivityFromRaven(raven, activityClass); uk.org.taverna.scufl2.api.activity.Activity newActivity = new uk.org.taverna.scufl2.api.activity.Activity(); newActivity.setType(new ActivityType(activityId.toASCIIString())); return newActivity; } protected void parseActivityBinding(Activity origActivity) throws ParseException { ProcessorBinding processorBinding = new ProcessorBinding(); currentBindings.get().getProcessorBindings().add(processorBinding); processorBinding.setBoundProcessor(currentProcessor.get()); currentProcessorBinding.set(processorBinding); 
uk.org.taverna.scufl2.api.activity.Activity newActivity = parseActivity(origActivity); currentActivity.set(newActivity); currentResearchObject.get().getActivities().add(newActivity); processorBinding.setBoundActivity(newActivity); parseActivityInputMap(origActivity.getInputMap()); parseActivityOutputMap(origActivity.getOutputMap()); parseActivityConfiguration(origActivity.getConfigBean()); currentActivity.remove(); currentProcessorBinding.remove(); } protected void parseActivityConfiguration(ConfigBean configBean) { Configuration configuration = new Configuration(); configuration.setConfigured(currentActivity.get()); Object config = configBean.getAny(); System.out.println("Checking " + config + " " + config.getClass()); BeanInfo configBeanInfo; try { configBeanInfo = Introspector.getBeanInfo(config.getClass()); } catch (IntrospectionException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } for (PropertyDescriptor property : configBeanInfo.getPropertyDescriptors()) { if (property.getReadMethod() == null) { continue; } ConfigurablePropertyConfiguration configurablePropertyConfiguration = new ConfigurablePropertyConfiguration(); configurablePropertyConfiguration.setParent(configuration); String propertyName = property.getName(); ConfigurableProperty configuredProperty = new ConfigurableProperty(propertyName); configurablePropertyConfiguration.setConfiguredProperty(configuredProperty); try { Object value = property.getReadMethod().invoke(config); System.out.println(propertyName + ": "+ value); if (value instanceof Document) { Document document = (Document) value; value = document.getDocumentElement(); } configurablePropertyConfiguration.setValue(value); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalAccessException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (InvocationTargetException e) { // TODO Auto-generated catch block e.printStackTrace(); } } 
currentResearchObject.get().getConfigurations().add(configuration); } protected void parseActivityInputMap(Map inputMap) throws ParseException { for (Mapping mapping : inputMap.getMap()) { String fromProcessorOutput = mapping.getFrom(); String toActivityOutput = mapping.getTo(); ProcessorInputPortBinding processorInputPortBinding = new ProcessorInputPortBinding(); InputProcessorPort inputProcessorPort = findNamed( currentProcessor.get().getInputPorts(), fromProcessorOutput); if (inputProcessorPort == null) { String message = "Invalid input port binding, " + "unknown processor port: " + fromProcessorOutput + "->" + toActivityOutput + " in " + currentProcessor.get(); if (isStrict()) { throw new ParseException(message); } else { logger.warn(message); continue; } } InputActivityPort inputActivityPort = new InputActivityPort(); inputActivityPort.setName(toActivityOutput); inputActivityPort.setParent(currentActivity.get()); currentActivity.get().getInputPorts().add(inputActivityPort); processorInputPortBinding.setBoundActivityPort(inputActivityPort); processorInputPortBinding.setBoundProcessorPort(inputProcessorPort); currentProcessorBinding.get().getInputPortBindings().add( processorInputPortBinding); } } protected void parseActivityOutputMap(Map outputMap) throws ParseException { for (Mapping mapping : outputMap.getMap()) { String fromActivityOutput = mapping.getFrom(); String toProcessorOutput = mapping.getTo(); ProcessorOutputPortBinding processorOutputPortBinding = new ProcessorOutputPortBinding(); OutputProcessorPort outputProcessorPort = findNamed( currentProcessor.get().getOutputPorts(), toProcessorOutput); if (outputProcessorPort == null) { String message = "Invalid output port binding, " + "unknown processor port: " + fromActivityOutput + "->" + toProcessorOutput + " in " + currentProcessor.get(); if (isStrict()) { throw new ParseException(message); } else { logger.warn(message); continue; } } OutputActivityPort outputActivityPort = new OutputActivityPort(); 
outputActivityPort.setName(fromActivityOutput); outputActivityPort.setParent(currentActivity.get()); currentActivity.get().getOutputPorts().add(outputActivityPort); processorOutputPortBinding.setBoundActivityPort(outputActivityPort); processorOutputPortBinding.setBoundProcessorPort(outputProcessorPort); currentProcessorBinding.get().getOutputPortBindings().add( processorOutputPortBinding); } } protected Workflow parseDataflow(Dataflow df) throws ParseException { Workflow wf = new Workflow(); currentWorkflow.set(wf); wf.setName(df.getName()); // wf.setId(df.getId()); wf.setInputPorts(parseInputPorts(df.getInputPorts())); wf.setOutputPorts(parseOutputPorts(df.getOutputPorts())); wf.setProcessors(parseProcessors(df.getProcessors())); wf.setDatalinks(parseDatalinks(df.getDatalinks())); // TODO: Start conditions, annotations currentWorkflow.remove(); return wf; } protected Set<DataLink> parseDatalinks(Datalinks origLinks) throws ParseException { HashSet<DataLink> newLinks = new HashSet<DataLink>(); for (uk.org.taverna.scufl2.xml.t2flow.jaxb.DataLink origLink : origLinks .getDatalink()) { try { SenderPort senderPort = findSenderPort(currentWorkflow.get(), origLink.getSource()); ReceiverPort receiverPort = findReceiverPort(currentWorkflow .get(), origLink.getSink()); DataLink newLink = new DataLink(senderPort, receiverPort); newLinks.add(newLink); } catch (ParseException ex) { logger.warn("Could not translate link:\n" + origLink, ex); if (isStrict()) { throw ex; } continue; } } return newLinks; } protected ToBeDecided parseDispatchStack(DispatchStack dispatchStack) { return new ToBeDecided(); } @SuppressWarnings("boxing") protected Set<InputWorkflowPort> parseInputPorts( AnnotatedGranularDepthPorts originalPorts) throws ParseException { Set<InputWorkflowPort> createdPorts = new HashSet<InputWorkflowPort>(); for (AnnotatedGranularDepthPort originalPort : originalPorts.getPort()) { InputWorkflowPort newPort = new InputWorkflowPort(currentWorkflow .get(), 
originalPort.getName()); newPort.setDepth(originalPort.getDepth().intValue()); if (!originalPort.getGranularDepth() .equals(originalPort.getDepth())) { String message = "Specific input port granular depth not " + "supported in scufl2, port " + originalPort.getName() + " has depth " + originalPort.getDepth() + " and granular depth " + originalPort.getGranularDepth(); logger.warn(message); if (isStrict()) { throw new ParseException(message); } } createdPorts.add(newPort); } return createdPorts; } protected List<IterationStrategy> parseIterationStrategyStack( IterationStrategyStack originalStack) { List<IterationStrategy> newStack = new ArrayList<IterationStrategy>(); // TODO: Copy iteration strategy return newStack; } protected Set<OutputWorkflowPort> parseOutputPorts(Ports originalPorts) { Set<OutputWorkflowPort> createdPorts = new HashSet<OutputWorkflowPort>(); for (Port originalPort : originalPorts.getPort()) { OutputWorkflowPort newPort = new OutputWorkflowPort(currentWorkflow .get(), originalPort.getName()); createdPorts.add(newPort); } return createdPorts; } @SuppressWarnings("boxing") protected Set<InputProcessorPort> parseProcessorInputPorts( Processor newProc, DepthPorts origPorts) { Set<InputProcessorPort> newPorts = new HashSet<InputProcessorPort>(); for (DepthPort origPort : origPorts.getPort()) { InputProcessorPort newPort = new InputProcessorPort(newProc, origPort.getName()); newPort.setDepth(origPort.getDepth().intValue()); // TODO: What about InputProcessorPort granular depth? 
newPorts.add(newPort); } return newPorts; } @SuppressWarnings("boxing") protected Set<OutputProcessorPort> parseProcessorOutputPorts( Processor newProc, GranularDepthPorts origPorts) { Set<OutputProcessorPort> newPorts = new HashSet<OutputProcessorPort>(); for (GranularDepthPort origPort : origPorts.getPort()) { OutputProcessorPort newPort = new OutputProcessorPort(newProc, origPort.getName()); newPort.setDepth(origPort.getDepth().intValue()); newPort.setGranularDepth(origPort.getGranularDepth().intValue()); newPorts.add(newPort); } return newPorts; } protected Set<Processor> parseProcessors(Processors originalProcessors) throws ParseException { HashSet<Processor> newProcessors = new HashSet<Processor>(); for (uk.org.taverna.scufl2.xml.t2flow.jaxb.Processor origProc : originalProcessors .getProcessor()) { Processor newProc = new Processor(currentWorkflow.get(), origProc .getName()); currentProcessor.set(newProc); newProc.setInputPorts(parseProcessorInputPorts(newProc, origProc .getInputPorts())); newProc.setOutputPorts(parseProcessorOutputPorts(newProc, origProc .getOutputPorts())); newProc.setDispatchStack(parseDispatchStack(origProc .getDispatchStack())); newProc .setIterationStrategyStack(parseIterationStrategyStack(origProc .getIterationStrategyStack())); newProcessors.add(newProc); for (Activity origActivity : origProc.getActivities().getActivity()) { parseActivityBinding(origActivity); } } currentProcessor.remove(); return newProcessors; } @SuppressWarnings("unchecked") public TavernaResearchObject parseT2Flow(File t2File) throws IOException, ParseException, JAXBException { JAXBElement<uk.org.taverna.scufl2.xml.t2flow.jaxb.Workflow> root = (JAXBElement<uk.org.taverna.scufl2.xml.t2flow.jaxb.Workflow>) unmarshaller .unmarshal(t2File); return parseT2Flow(root.getValue()); } @SuppressWarnings("unchecked") public TavernaResearchObject parseT2Flow(InputStream t2File) throws IOException, JAXBException, ParseException { 
JAXBElement<uk.org.taverna.scufl2.xml.t2flow.jaxb.Workflow> root = (JAXBElement<uk.org.taverna.scufl2.xml.t2flow.jaxb.Workflow>) unmarshaller .unmarshal(t2File); return parseT2Flow(root.getValue()); } public TavernaResearchObject parseT2Flow( uk.org.taverna.scufl2.xml.t2flow.jaxb.Workflow wf) throws ParseException { TavernaResearchObject ro = new TavernaResearchObject(); currentResearchObject.set(ro); makeDefaultBindings(wf); for (Dataflow df : wf.getDataflow()) { Workflow workflow = parseDataflow(df); if (df.getRole().equals(Role.TOP)) { ro.setMainWorkflow(workflow); } ro.getWorkflows().add(workflow); } if (isStrict() || ro.getMainWorkflow() == null) { throw new ParseException("No main workflow"); } currentResearchObject.remove(); return ro; } public void setStrict(boolean strict) { this.strict = strict; } }
package brooklyn.entity.monitoring.monit;

import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import brooklyn.entity.Entity;
import brooklyn.entity.basic.SoftwareProcessImpl;
import brooklyn.event.feed.ssh.SshFeed;
import brooklyn.event.feed.ssh.SshPollConfig;
import brooklyn.event.feed.ssh.SshPollValue;
import brooklyn.location.Location;
import brooklyn.location.basic.SshMachineLocation;
import brooklyn.util.text.Strings;
import brooklyn.util.time.Duration;

import com.google.common.base.Function;
import com.google.common.collect.Iterables;

/**
 * Monit node entity: polls the monit status command over SSH every five
 * seconds to derive service-up state and the monitored process name/status.
 */
public class MonitNodeImpl extends SoftwareProcessImpl implements MonitNode {

    private static final Logger LOG = LoggerFactory.getLogger(MonitNodeImpl.class);

    // SSH polling feed; created in connectSensors, stopped in disconnectSensors.
    private SshFeed feed;

    public MonitNodeImpl() {
    }

    public MonitNodeImpl(Map flags) {
        this(flags, null);
    }

    public MonitNodeImpl(Map flags, Entity parent) {
        super(flags, parent);
    }

    @Override
    public Class getDriverInterface() {
        return MonitDriver.class;
    }

    @Override
    public MonitDriver getDriver() {
        return (MonitDriver) super.getDriver();
    }

    @Override
    protected void connectSensors() {
        super.connectSensors();
        Location loc = Iterables.get(getLocations(), 0, null);
        // Without an SSH-capable location there is nothing to poll; assume up.
        if (!(loc instanceof SshMachineLocation)) {
            LOG.warn("Location(s) {} not an ssh-machine location, so not polling for status; setting serviceUp immediately", getLocations());
            setAttribute(SERVICE_UP, true);
            return;
        }
        String statusCmd = getDriver().getStatusCmd();
        feed = SshFeed.builder()
                .entity(this)
                .period(Duration.FIVE_SECONDS)
                .machine((SshMachineLocation) loc)
                // Service is considered up whenever the status command succeeds.
                .poll(new SshPollConfig<Boolean>(SERVICE_UP)
                        .command(statusCmd)
                        .setOnSuccess(true)
                        .setOnFailureOrException(false))
                // First word after "Process" in monit output = target process name.
                .poll(new SshPollConfig<String>(MONIT_TARGET_PROCESS_NAME)
                        .command(statusCmd)
                        .onSuccess(new Function<SshPollValue, String>() {
                            @Override
                            public String apply(SshPollValue input) {
                                return Strings.getFirstWordAfter(input.getStdout(), "Process");
                            }
                        })
                        .setOnFailureOrException(null))
                // First word after "status" = target process status.
                .poll(new SshPollConfig<String>(MONIT_TARGET_PROCESS_STATUS)
                        .command(statusCmd)
                        .onSuccess(new Function<SshPollValue, String>() {
                            @Override
                            public String apply(SshPollValue input) {
                                return Strings.getFirstWordAfter(input.getStdout(), "status");
                            }
                        })
                        .setOnFailureOrException(null))
                .build();
    }

    @Override
    protected void disconnectSensors() {
        if (feed != null) {
            feed.stop();
        }
    }

    @Override
    public String getShortName() {
        return "Monit";
    }
}
package gov.nih.nci.evs.browser.utils;

import java.io.*;
import java.util.*;

import org.LexGrid.LexBIG.DataModel.Core.CodingSchemeVersionOrTag;
import org.LexGrid.LexBIG.Utility.Constructors;
import org.json.JSONArray;
import org.json.JSONObject;
import org.lexevs.tree.json.JsonConverter;
import org.lexevs.tree.json.JsonConverterFactory;
import org.lexevs.tree.model.LexEvsTree;
import org.lexevs.tree.model.LexEvsTreeNode;
import org.lexevs.tree.model.LexEvsTreeNode.ExpandableStatus;
import org.lexevs.tree.service.TreeService;
import org.lexevs.tree.service.TreeServiceFactory;
import org.lexevs.tree.dao.iterator.ChildTreeNodeIterator;

import org.apache.log4j.*;

/**
 * Renders a concept hierarchy as YUI TreeView JavaScript and answers
 * child-node queries against the LexEVS tree service.
 *
 * @author EVS Team
 * @version 1.0
 *
 * Modification history Initial implementation kim.ong@ngc.com
 */
// Note: Version with the has more (...) nodes feature.
public class ViewInHierarchyUtils {

    // Maximum number of children rendered before a "has more" (...) node is used.
    private int MAX_CHILDREN = 5;

    // BUG FIX: logger was registered under DataUtils.class, so messages from
    // this class were attributed to DataUtils in the logs.
    private static Logger _logger = Logger.getLogger(ViewInHierarchyUtils.class);

    private static Random rand = new Random();

    // Counter used to give each "has more" (...) placeholder a unique id.
    int has_more_node_knt = 0;

    /** Returns a random suffix ("_<int>", '-' mapped to 'n') used to uniquify node ids. */
    private String generateRandomString() {
        int i = rand.nextInt();
        String t = Integer.toString(i);
        t = t.replace("-", "n");
        return "_" + t;
    }

    /** Derives a JavaScript-safe variable name for a tree node; null maps to "root". */
    private String generateID(LexEvsTreeNode node) {
        if (node == null) {
            return "root";
        }
        return "N_" + replaceNodeID(node.getCode());
    }

    public ViewInHierarchyUtils() {
        has_more_node_knt = 0;
    }

    private static void println(PrintWriter out, String text) {
        gov.nih.nci.evs.browser.servlet.AjaxServlet.println(out, text);
    }

    /**
     * Hashes a concept code into an identifier safe for use in generated
     * JavaScript variable names, with a random suffix to avoid collisions.
     */
    private String replaceNodeID(String code) {
        String s = "" + code.hashCode();
        s = s.replace("-", "n");
        return s + generateRandomString();
    }

    /** Reverses the legacy character escaping applied to concept codes in node ids. */
    private String restoreNodeID(String code) {
        code = code.replaceAll("cCc", ":");
        code = code.replaceAll("cDc", "-");
        code = code.replaceAll("cUc", "_");
        //code = code.replaceAll("cSc", "/");
        //code = code.replaceAll("cEc", ".");
        return code;
    }

    /** Convenience constructor: prints the tree for a concept to stdout (used by main). */
    public ViewInHierarchyUtils(String codingScheme, String version, String code) {
        has_more_node_knt = 0;
        try {
            PrintWriter pw = new PrintWriter(System.out, true);
            printTree(pw, codingScheme, version, code);
        } catch (Exception e) {
            System.out.println(e.getClass().getName() + ": " + e.getMessage());
        }
    }

    /**
     * Resolves the path-to-root tree for a concept and prints it as YUI
     * TreeView JavaScript.
     *
     * @param out          destination for the generated JavaScript
     * @param codingScheme coding scheme name
     * @param version      coding scheme version (may be null/empty for default)
     * @param code         focus concept code
     */
    public void printTree(PrintWriter out, String codingScheme, String version,
            String code) {
        try {
            TreeService service = TreeServiceFactory.getInstance().getTreeService(
                    RemoteServerUtil.createLexBIGService());
            CodingSchemeVersionOrTag csvt = null;
            if (version != null && version.length() > 0) {
                csvt = Constructors.createCodingSchemeVersionOrTagFromVersion(version);
            }
            String namespace = DataUtils.getNamespaceByCode(codingScheme, version, code);
            LexEvsTree tree = service.getTree(codingScheme, csvt, code, namespace);
            List<LexEvsTreeNode> listEvsTreeNode = service.getEvsTreeConverter()
                    .buildEvsTreePathFromRootTree(tree.getCurrentFocus());
            LexEvsTreeNode root = null;
            printTree(out, "", code, root, "root", listEvsTreeNode);
        } catch (Exception e) {
            _logger.error(e.getClass().getSimpleName() + ": " + e.getMessage());
        }
    }

    /** Recursively prints a list of sibling nodes and their resolved descendants. */
    private void printTree(PrintWriter out, String indent, String focus_code,
            LexEvsTreeNode parent, String parent_node_id, List<LexEvsTreeNode> nodes) {
        for (LexEvsTreeNode node : nodes) {
            String node_id = generateID(node);
            printTreeNode(out, indent, focus_code, node, node_id, parent, parent_node_id);
            List<LexEvsTreeNode> list_children = node.getPathToRootChildren();
            if (list_children != null) {
                printTree(out, indent + " ", focus_code, node, node_id, list_children);
            }
        }
    }

    /**
     * Emits the YUI TextNode JavaScript for one tree node: node data, the
     * TextNode constructor call (pre-expanded when children are already
     * resolved), leaf/dynamic-load flags, and focus highlighting.
     */
    private void printTreeNode(PrintWriter out, String indent, String focus_code,
            LexEvsTreeNode node, String node_id, LexEvsTreeNode parent,
            String parent_node_id) {
        if (node == null) return;
        try {
            LexEvsTreeNode.ExpandableStatus node_status = node.getExpandableStatus();
            boolean expandable =
                node_status == LexEvsTreeNode.ExpandableStatus.IS_EXPANDABLE;
            // "expanded" = children already resolved, so render the node open.
            boolean expanded = false;
            if (expandable) {
                List<LexEvsTreeNode> list_children = node.getPathToRootChildren();
                if (list_children != null && list_children.size() > 0) {
                    expanded = true;
                }
            }
            String code = node.getCode();
            boolean isHasMoreNode = false;
            // "..." placeholder nodes get a synthetic unique code.
            if (code.compareTo("...") == 0) {
                isHasMoreNode = true;
                has_more_node_knt++;
                if (parent == null) {
                    code = "root" + "_" + focus_code + "_dot_"
                        + Integer.toString(has_more_node_knt);
                } else {
                    code = parent.getCode() + "_dot_"
                        + Integer.toString(has_more_node_knt);
                }
            }
            String node_label = node_id;
            String node_name = node.getEntityDescription();
            String indentStr = indent + " ";
            String symbol = getNodeSymbol(node);
            println(out, "");
            println(out, indentStr + "// " + symbol + " " + node_name + "(" + code + ")");
            println(out, indentStr + "newNodeDetails = \"javascript:onClickTreeNode('" + code + "');\";");
            // [GF#32225] View-In-Hierarchy page fails to render on tree node label
            // containing double quote characters. KLO, 061312
            if (node_name.indexOf("\"") != -1) {
                node_name = replaceAll(node_name, "\"", "'");
            }
            println(out, indentStr + "newNodeData = { label:\"" + node_name + "\", id:\"" + code + "\", href:newNodeDetails };");
            // Third TextNode argument is the initial expanded state; the original
            // three identical branches only differed in that boolean.
            println(out, indentStr + "var " + node_label + " = new YAHOO.widget.TextNode(newNodeData, " + parent_node_id + ", " + expanded + ");");
            if (expandable || isHasMoreNode) {
                println(out, indentStr + node_label + ".isLeaf = false;");
                println(out, indentStr + node_label + ".ontology_node_child_count = 1;");
                // Children not yet resolved: let YUI fetch them lazily.
                if (node.getPathToRootChildren() == null) {
                    println(out, indentStr + node_label + ".setDynamicLoad(loadNodeData);");
                }
            } else {
                println(out, indentStr + node_label + ".ontology_node_child_count = 0;");
                println(out, indentStr + node_label + ".isLeaf = true;");
            }
            if (focus_code.compareTo(code) == 0) {
                println(out, indentStr + node_label + ".labelStyle = \"ygtvlabel_highlight\";");
            }
        } catch (Exception ex) {
            // BUG FIX: exception was silently swallowed; at least record it.
            _logger.warn("printTreeNode failed: " + ex.getMessage(), ex);
        }
    }

    /** Returns "-" (expanded), "+" (expandable, unresolved) or "@" (leaf) for comments. */
    private static String getNodeSymbol(LexEvsTreeNode node) {
        String symbol = "@";
        if (node.getExpandableStatus() == LexEvsTreeNode.ExpandableStatus.IS_EXPANDABLE) {
            symbol = node.getPathToRootChildren() != null ? "-" : "+";
        }
        return symbol;
    }

    /**
     * Fetches the direct children of a concept via the tree service.
     *
     * @param parent_code parent concept code; the tree roots "@"/"@@" are used
     *                    when {@code from_root} is true
     * @return children list, or null when the parent cannot be found in the tree
     */
    public List<LexEvsTreeNode> getChildren(String codingScheme, String version,
            String parent_code, boolean from_root) {
        // root: input parent_code = "@" or "@@";
        List<LexEvsTreeNode> list = new ArrayList<LexEvsTreeNode>();
        CodingSchemeVersionOrTag versionOrTag = new CodingSchemeVersionOrTag();
        if (version != null) versionOrTag.setVersion(version);
        TreeService treeService = TreeServiceFactory.getInstance().getTreeService(
                RemoteServerUtil.createLexBIGService());
        LexEvsTree lexEvsTree = treeService.getTree(codingScheme, versionOrTag, parent_code);
        LexEvsTreeNode parent_node = null;
        if (!from_root) {
            parent_node = lexEvsTree.findNodeInTree(parent_code);
        } else {
            parent_node = lexEvsTree.findNodeInTree("@@");
            if (parent_node == null) {
                parent_node = lexEvsTree.findNodeInTree("@");
            }
        }
        if (parent_node == null) {
            return null;
        }
        LexEvsTreeNode.ExpandableStatus parent_node_status =
            parent_node.getExpandableStatus();
        if (parent_node_status == LexEvsTreeNode.ExpandableStatus.IS_EXPANDABLE) {
            ChildTreeNodeIterator itr = parent_node.getChildIterator();
            try {
                // The iterator has been observed to cycle; stop on the first repeat.
                Set<String> hset = new HashSet<String>();
                while (itr.hasNext()) {
                    LexEvsTreeNode child = itr.next();
                    if (child == null) break;
                    String child_code = child.getCode();
                    if (hset.contains(child_code)) break;
                    hset.add(child_code);
                    list.add(child);
                }
            } catch (Exception ex) {
                _logger.debug("WARNING: ChildTreeNodeIterator exception...");
            }
        }
        return list;
    }

    /**
     * Returns the children of a concept beyond the first MAX_CHILDREN (the part
     * hidden behind a "has more" node), keyed by the focus code.
     */
    public HashMap getRemainingSubconcepts(String codingScheme, String version,
            String focus_code, boolean from_root) {
        HashMap hmap = new HashMap();
        String childNavText = "inverse_is_a";
        TreeItem ti = new TreeItem(focus_code, "");
        ti._expandable = false;
        List<LexEvsTreeNode> list = getChildren(codingScheme, version, focus_code, from_root);
        // BUG FIX: getChildren may return null (unknown parent); guard before size().
        if (list != null && list.size() > MAX_CHILDREN) {
            for (int i = MAX_CHILDREN; i < list.size(); i++) {
                LexEvsTreeNode child = (LexEvsTreeNode) list.get(i);
                TreeItem childItem = new TreeItem(child.getCode(), child.getEntityDescription());
                childItem._expandable = false;
                LexEvsTreeNode.ExpandableStatus child_node_status = child.getExpandableStatus();
                if (child_node_status == LexEvsTreeNode.ExpandableStatus.IS_EXPANDABLE) {
                    childItem._expandable = true;
                }
                ti._expandable = true;
                ti.addChild(childNavText, childItem);
            }
        }
        hmap.put(focus_code, ti);
        return hmap;
    }

    /** Returns all direct children of a concept as TreeItems, keyed by the focus code. */
    public HashMap getSubconcepts(String codingScheme, String version, String focus_code) {
        HashMap hmap = new HashMap();
        String childNavText = "inverse_is_a";
        TreeItem ti = new TreeItem(focus_code, "");
        ti._expandable = false;
        List<LexEvsTreeNode> list = getChildren(codingScheme, version, focus_code, false);
        // BUG FIX: getChildren may return null (unknown parent); guard before size().
        if (list != null && list.size() > 0) {
            for (int i = 0; i < list.size(); i++) {
                LexEvsTreeNode child = (LexEvsTreeNode) list.get(i);
                TreeItem childItem = new TreeItem(child.getCode(), child.getEntityDescription());
                childItem._expandable = false;
                LexEvsTreeNode.ExpandableStatus child_node_status = child.getExpandableStatus();
                if (child_node_status == LexEvsTreeNode.ExpandableStatus.IS_EXPANDABLE) {
                    childItem._expandable = true;
                }
                ti._expandable = true;
                ti.addChild(childNavText, childItem);
            }
        }
        hmap.put(focus_code, ti);
        return hmap;
    }

    /** Splits a line on '|'; null line yields null. */
    public static Vector<String> parseData(String line) {
        if (line == null) return null;
        String tab = "|";
        return parseData(line, tab);
    }

    /** Splits a line on the given delimiter, mapping literal "null" tokens to " ". */
    public static Vector<String> parseData(String line, String tab) {
        if (line == null) return null;
        Vector<String> data_vec = new Vector<String>();
        StringTokenizer st = new StringTokenizer(line, tab);
        while (st.hasMoreTokens()) {
            String value = st.nextToken();
            if (value.compareTo("null") == 0) value = " ";
            data_vec.add(value);
        }
        return data_vec;
    }

    /**
     * Extracts the focus concept code from a generated node id; ids carrying a
     * "_dot_" suffix (has-more nodes) are unpacked to their embedded code.
     */
    public String getFocusCode(String ontology_node_id) {
        if (ontology_node_id == null) return null;
        if (ontology_node_id.indexOf("_dot_") == -1) {
            return ontology_node_id;
        }
        Vector<String> v = parseData(ontology_node_id, "_");
        if (v.contains("root")) {
            return restoreNodeID((String) v.elementAt(1));
        }
        return restoreNodeID((String) v.elementAt(0));
    }

    /**
     * Replaces every occurrence of s1 in t with s2.
     * BUG FIX: the original rescanned from index 0 after each replacement,
     * which re-processed replacement text and looped forever whenever s2
     * contained s1; the scan now resumes after the inserted replacement.
     */
    public static String replaceAll(String t, String s1, String s2) {
        int n = t.indexOf(s1);
        while (n != -1) {
            t = t.substring(0, n) + s2 + t.substring(n + s1.length());
            n = t.indexOf(s1, n + s2.length());
        }
        return t;
    }

    public static void main(String[] args) throws Exception {
        new ViewInHierarchyUtils("NCI_Thesaurus", "11.09d", "C37927"); // Color
    }
}
package fitnesse.slim.fixtureInteraction;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

/**
 * Interaction that memoizes the reflective lookups of
 * {@link DefaultInteraction}: resolved classes, constructors and methods.
 * Negative results are cached too, using sentinels taken from the private
 * {@link NotExisting} class, so repeated misses stay cheap.
 *
 * Not thread-safe: the backing maps are plain HashMaps, matching the
 * single-threaded use by the slim runner.
 */
public class CachedInteraction extends DefaultInteraction {
    // Sentinels representing a cached "not found" result.
    private static final Constructor<?> noConstructor = NotExisting.class.getConstructors()[0];
    private static final Method noMethod = NotExisting.class.getDeclaredMethods()[0];

    private final Map<String, Constructor<?>> constructorsByClassAndArgs = new HashMap<>();
    private final Map<String, Class<?>> classCache = new HashMap<>();
    private final Map<MethodKey, Method> methodsByNameAndArgs = new HashMap<>();

    /** Caches constructor lookup per (class name, arg count); null results cached as sentinel. */
    @Override
    protected Constructor<?> getConstructor(Class<?> clazz, Object[] args) {
        String key = String.format("%s_%d", clazz.getName(), args.length);
        Constructor<?> cached = constructorsByClassAndArgs.get(key);
        if (cached == noConstructor) return null;
        if (cached != null) return cached;

        Constructor<?> constructor = handleConstructorCacheMiss(clazz, args);
        if (constructor == null) {
            constructorsByClassAndArgs.put(key, noConstructor);
        } else {
            constructorsByClassAndArgs.put(key, constructor);
        }
        return constructor;
    }

    /** Caches class-name resolution; unresolvable names cached as NotExisting. */
    @Override
    protected Class<?> getClass(String className) {
        Class<?> k = classCache.get(className);
        if (k == NotExisting.class) return null;
        if (k != null) return k;

        k = handleClassCacheMiss(className);
        if (k == null) {
            classCache.put(className, NotExisting.class);
        } else {
            classCache.put(className, k);
        }
        return k;
    }

    /** Caches method lookup per (class, method name, arg count). */
    @Override
    protected Method findMatchingMethod(String methodName, Class<?> k, int nArgs) {
        MethodKey key = new MethodKey(k, methodName, nArgs);
        Method cached = methodsByNameAndArgs.get(key);
        if (cached == noMethod) return null;
        if (cached != null) return cached;

        Method method = handleMethodCacheMiss(methodName, k, nArgs);
        if (method == null) {
            methodsByNameAndArgs.put(key, noMethod);
        } else {
            methodsByNameAndArgs.put(key, method);
        }
        return method;
    }

    // Overridable miss handlers delegate to the uncached base implementation.
    protected Constructor<?> handleConstructorCacheMiss(Class<?> clazz, Object[] args) {
        return super.getConstructor(clazz, args);
    }

    protected Class<?> handleClassCacheMiss(String className) {
        return super.getClass(className);
    }

    protected Method handleMethodCacheMiss(String methodName, Class<?> k, int nArgs) {
        return super.findMatchingMethod(methodName, k, nArgs);
    }

    /** Cache key: fully-qualified class name + method name + argument count. */
    private static final class MethodKey {
        private final String k;
        private final String method;
        private final int nArgs;

        public MethodKey(Class<?> k, String method, int nArgs) {
            // BUG FIX: the key used k.getSimpleName(), which is not unique across
            // packages — two classes with the same simple name would share cache
            // entries and could return a Method belonging to the wrong class.
            // Use the fully-qualified name, consistent with the constructor cache.
            this.k = k.getName();
            this.method = method;
            this.nArgs = nArgs;
        }

        @Override
        public int hashCode() {
            int result = k.hashCode();
            result = 31 * result + method.hashCode();
            result = 31 * result + nArgs;
            return result;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            MethodKey methodKey = (MethodKey) o;
            if (nArgs != methodKey.nArgs) return false;
            if (!k.equals(methodKey.k)) return false;
            return method.equals(methodKey.method);
        }
    }

    /** Placeholder type whose sole constructor/method serve as "not found" sentinels. */
    private static final class NotExisting {
        public NotExisting() {}
        public void doIt() {}
    }
}
package gov.lanl.adore.djatoka.openurl;

import gov.lanl.adore.djatoka.IExtract;
import gov.lanl.adore.djatoka.io.FormatConstants;
import gov.lanl.adore.djatoka.kdu.KduExtractExe;
import gov.lanl.adore.djatoka.util.IOUtils;
import gov.lanl.adore.djatoka.util.ImageProcessingUtils;
import gov.lanl.adore.djatoka.util.ImageRecord;
import gov.lanl.util.HttpDate;
import info.openurl.oom.ContextObject;
import info.openurl.oom.OpenURLRequest;
import info.openurl.oom.OpenURLRequestProcessor;
import info.openurl.oom.OpenURLResponse;
import info.openurl.oom.Service;
import info.openurl.oom.config.ClassConfig;
import info.openurl.oom.config.OpenURLConfig;
import info.openurl.oom.entities.ServiceType;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Properties;

import javax.servlet.http.HttpServletResponse;

import org.apache.log4j.Logger;

/**
 * The OpenURLJP2KMetadata OpenURL Service: returns the core properties of a
 * JP2 image (dimensions, DWT levels, compositing layers) as a JSON object.
 *
 * @author Ryan Chute
 */
public class OpenURLJP2KMetadata implements Service, FormatConstants {
    static Logger logger = Logger.getLogger(OpenURLJP2KMetadata.class);
    private static final String DEFAULT_IMPL_CLASS = SimpleListResolver.class.getCanonicalName();
    private static final String PROPS_KEY_IMPL_CLASS = "OpenURLJP2KService.referentResolverImpl";
    private static final String SVC_ID = "info:lanl-repo/svc/getMetadata";
    private static String implClass = null;
    private static Properties props = new Properties();

    /**
     * Construct an info:lanl-repo/svc/getMetadata web service class. Initializes
     * Referent Resolver instance using OpenURLJP2KService.referentResolverImpl property.
     *
     * @param openURLConfig OOM Properties forwarded from OpenURLServlet
     * @param classConfig Implementation Properties forwarded from OpenURLServlet
     * @throws ResolverException if the props file or resolver class cannot be loaded
     */
    public OpenURLJP2KMetadata(OpenURLConfig openURLConfig, ClassConfig classConfig)
            throws ResolverException {
        try {
            if (!ReferentManager.isInit()) {
                props = IOUtils.loadConfigByCP(classConfig.getArg("props"));
                implClass = props.getProperty(PROPS_KEY_IMPL_CLASS, DEFAULT_IMPL_CLASS);
                ReferentManager.init((IReferentResolver) Class.forName(implClass).newInstance(), props);
            }
        } catch (IOException e) {
            throw new ResolverException("Error attempting to open props file from classpath, disabling " + SVC_ID + " : " + e.getMessage());
        } catch (Exception e) {
            // BUG FIX: message used props.getProperty(implClass) — implClass holds
            // the class name itself, not a property key, so the message always
            // reported "null" instead of the class that failed to load.
            throw new ResolverException("Unable to inititalize implementation: " + implClass + " - " + e.getMessage());
        }
    }

    /**
     * Returns the OpenURL service identifier for this implementation of
     * info.openurl.oom.Service
     */
    public URI getServiceID() throws URISyntaxException {
        return new URI(SVC_ID);
    }

    /**
     * Returns the OpenURLResponse of a JSON object defining the core image
     * properties. Having obtained a result, this method is then responsible for
     * transforming it into an OpenURLResponse that acts as a proxy for
     * HttpServletResponse.
     */
    public OpenURLResponse resolve(ServiceType serviceType,
            ContextObject contextObject, OpenURLRequest openURLRequest,
            OpenURLRequestProcessor processor) {
        // NOTE(review): on success responseFormat stays null — presumably the
        // OpenURLResponse/servlet layer supplies a default content type; confirm.
        String responseFormat = null;
        int status = HttpServletResponse.SC_OK;
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try {
            IExtract jp2 = new KduExtractExe();
            ImageRecord r = ReferentManager.getImageRecord(contextObject.getReferent());
            r = jp2.getMetadata(r);
            // NOTE(review): values are concatenated without JSON escaping; a
            // quote or backslash in identifier/imagefile would break the output.
            StringBuilder sb = new StringBuilder();
            sb.append("{");
            sb.append("\n\"identifier\": \"" + r.getIdentifier() + "\",");
            sb.append("\n\"imagefile\": \"" + r.getImageFile() + "\",");
            sb.append("\n\"width\": \"" + r.getWidth() + "\",");
            sb.append("\n\"height\": \"" + r.getHeight() + "\",");
            sb.append("\n\"dwtLevels\": \"" + r.getLevels() + "\",");
            sb.append("\n\"levels\": \"" + ImageProcessingUtils.getLevelCount(r.getWidth(), r.getHeight()) + "\",");
            sb.append("\n\"compositingLayerCount\": \"" + r.getCompositingLayerCount() + "\"");
            sb.append("\n}");
            baos.write(sb.toString().getBytes());
        } catch (Exception e) {
            // Error path: replace any partial payload with the error message.
            baos = new ByteArrayOutputStream();
            try {
                if (e.getMessage() != null)
                    baos.write(e.getMessage().getBytes("UTF-8"));
                else {
                    logger.error(e, e);
                    baos.write("Internal Server Error: ".getBytes());
                }
            } catch (UnsupportedEncodingException e1) {
                e1.printStackTrace();
            } catch (IOException e2) {
                e2.printStackTrace();
            }
            responseFormat = "text/plain";
            status = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
        }
        HashMap<String, String> header_map = new HashMap<String, String>();
        header_map.put("Content-Length", baos.size() + "");
        header_map.put("Date", HttpDate.getHttpDate());
        return new OpenURLResponse(status, responseFormat, baos.toByteArray(), header_map);
    }
}
/**
 * This is a Data Transfer Object that sends and receives data between
 * ExecuteWorkflowAction and ExecuteWorkflowService objects
 */
package gov.nih.nci.calab.dto.workflow;

// BUG FIX: java.util.List was referenced but never imported; the class did not compile.
import java.util.List;

/**
 * DTO carrying a workflow execution request/result: the input/output file
 * names, assay and run identification, the aliquots involved, and the
 * submitted file's metadata.
 *
 * @author caLAB Team
 */
public class ExecuteWorkflowBean {
    private String inFileName;
    private String outFileName;
    private String assayType;
    private String assayName;
    private String runName;
    private String runDate;
    private String runBy;
    private List<String> aliquotIds;
    private String fileSubmissionDate;
    private String fileSubmitter;
    private String fileMaskStatus;

    public ExecuteWorkflowBean(String inFileName, String outFileName,
            String assayType, String assayName, String runName, String runDate,
            String runBy, List<String> aliquotIds, String fileSubmissionDate,
            String fileSubmitter, String fileMaskStatus) {
        super();
        this.inFileName = inFileName;
        this.outFileName = outFileName;
        this.assayType = assayType;
        this.assayName = assayName;
        this.runName = runName;
        this.runDate = runDate;
        this.runBy = runBy;
        this.aliquotIds = aliquotIds;
        this.fileSubmissionDate = fileSubmissionDate;
        this.fileSubmitter = fileSubmitter;
        this.fileMaskStatus = fileMaskStatus;
    }

    public String getInFileName() {
        return inFileName;
    }

    public void setInFileName(String inFileName) {
        this.inFileName = inFileName;
    }

    public String getOutFileName() {
        return outFileName;
    }

    public void setOutFileName(String outFileName) {
        this.outFileName = outFileName;
    }

    public String getAssayType() {
        return assayType;
    }

    public void setAssayType(String assayType) {
        this.assayType = assayType;
    }

    public String getAssayName() {
        return assayName;
    }

    public void setAssayName(String assayName) {
        this.assayName = assayName;
    }

    public String getRunName() {
        return runName;
    }

    public void setRunName(String runName) {
        this.runName = runName;
    }

    public String getRunDate() {
        return runDate;
    }

    public void setRunDate(String runDate) {
        this.runDate = runDate;
    }

    public String getRunBy() {
        return runBy;
    }

    public void setRunBy(String runBy) {
        this.runBy = runBy;
    }

    public List<String> getAliquotIds() {
        return aliquotIds;
    }

    public void setAliquotIds(List<String> aliquotIds) {
        this.aliquotIds = aliquotIds;
    }

    public String getFileSubmissionDate() {
        return fileSubmissionDate;
    }

    public void setFileSubmissionDate(String fileSubmissionDate) {
        this.fileSubmissionDate = fileSubmissionDate;
    }

    public String getFileSubmitter() {
        return fileSubmitter;
    }

    public void setFileSubmitter(String fileSubmitter) {
        this.fileSubmitter = fileSubmitter;
    }

    public String getFileMaskStatus() {
        return fileMaskStatus;
    }

    public void setFileMaskStatus(String fileMaskStatus) {
        this.fileMaskStatus = fileMaskStatus;
    }
}
package gov.nih.nci.cananolab.service.report;

import gov.nih.nci.cananolab.domain.common.Report;
import gov.nih.nci.cananolab.domain.particle.NanoparticleSample;
import gov.nih.nci.cananolab.dto.common.ReportBean;
import gov.nih.nci.cananolab.exception.CaNanoLabSecurityException;
import gov.nih.nci.cananolab.exception.ReportException;
import gov.nih.nci.cananolab.service.common.FileService;
import gov.nih.nci.cananolab.service.particle.NanoparticleSampleService;
import gov.nih.nci.cananolab.service.security.AuthorizationService;
import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService;
import gov.nih.nci.cananolab.util.CaNanoLabConstants;
import gov.nih.nci.system.client.ApplicationServiceProvider;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.log4j.Logger;
import org.hibernate.FetchMode;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions;

/**
 * This class includes methods involved in submitting and searching reports.
 *
 * @author pansu
 */
public class ReportService {
    private static Logger logger = Logger.getLogger(ReportService.class);

    /**
     * Persist a new report or update an existing report, associating it with
     * the named nanoparticle samples and writing the file data to disk.
     *
     * @param report        the report to save
     * @param particleNames names of the samples the report belongs to
     * @param fileData      uploaded file content; may be null when no new file
     * @throws ReportException on any persistence failure
     */
    public void saveReport(Report report, String[] particleNames, byte[] fileData)
            throws ReportException {
        try {
            CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
                    .getApplicationService();
            NanoparticleSampleService sampleService = new NanoparticleSampleService();
            Set<NanoparticleSample> particleSamples = new HashSet<NanoparticleSample>();
            for (String name : particleNames) {
                NanoparticleSample sample = sampleService
                        .findNanoparticleSampleByName(name);
                particleSamples.add(sample);
            }
            if (report.getId() != null) {
                try {
                    Report dbReport = (Report) appService.get(Report.class,
                            report.getId());
                    // don't change createdBy and createdDate it is already persisted
                    report.setCreatedBy(dbReport.getCreatedBy());
                    report.setCreatedDate(dbReport.getCreatedDate());
                    // load fileName and uri if no new data has been uploaded or
                    // no new url has been entered
                    if (fileData == null || !report.getUriExternal()) {
                        report.setName(dbReport.getName());
                    }
                } catch (Exception e) {
                    String err = "Object doesn't exist in the database anymore. Please log in again.";
                    logger.error(err);
                    // NOTE: this is re-caught and re-wrapped by the outer catch below,
                    // matching the original behavior.
                    throw new ReportException(err, e);
                }
            }
            if (report.getNanoparticleSampleCollection() == null) {
                report.setNanoparticleSampleCollection(new HashSet<NanoparticleSample>());
            }
            // Maintain both sides of the bidirectional association.
            for (NanoparticleSample sample : particleSamples) {
                report.getNanoparticleSampleCollection().add(sample);
                sample.getReportCollection().add(report);
            }
            appService.saveOrUpdate(report);
            // save to the file system if fileData is not empty
            FileService fileService = new FileService();
            fileService.writeFile(report, fileData);
            // TODO save other report type
        } catch (Exception e) {
            // BUG FIX: message said "Error in saving the nanoparticle sample."
            // although this path saves a report.
            String err = "Error in saving the report.";
            logger.error(err, e);
            throw new ReportException(err, e);
        }
    }

    /**
     * Finds reports matching the title/category filters, then narrows the
     * result by composition and function class names.
     *
     * @return matching reports as ReportBeans
     */
    public List<ReportBean> findReportsBy(String reportTitle,
            String reportCategory, String[] nanoparticleEntityClassNames,
            String[] functionalizingEntityClassNames, String[] functionClassNames)
            throws ReportException, CaNanoLabSecurityException {
        List<ReportBean> reports = new ArrayList<ReportBean>();
        try {
            DetachedCriteria crit = DetachedCriteria.forClass(Report.class);
            // BUG FIX: both conditions used non-short-circuit '&', so the
            // length() call was evaluated even when the value was null,
            // throwing a NullPointerException. Use '&&'.
            if (reportTitle != null && reportTitle.length() > 0) {
                crit.add(Restrictions.ilike("title", reportTitle, MatchMode.ANYWHERE));
            }
            if (reportCategory != null && reportCategory.length() > 0) {
                crit.add(Restrictions.eq("category", reportCategory));
            }
            crit.setFetchMode("nanoparticleSampleCollection", FetchMode.JOIN);
            crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
            CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
                    .getApplicationService();
            List results = appService.query(crit);
            for (Object obj : results) {
                Report report = (Report) obj;
                reports.add(new ReportBean(report));
            }
            List<ReportBean> compositionFiltered = filterByCompositions(
                    nanoparticleEntityClassNames,
                    functionalizingEntityClassNames, reports);
            List<ReportBean> theReports = filterByFunctions(functionClassNames,
                    compositionFiltered);
            return theReports;
        } catch (Exception e) {
            String err = "Problem finding report info.";
            logger.error(err, e);
            throw new ReportException(err, e);
        }
    }

    /** Keeps only reports whose particles carry at least one of the given function classes. */
    private List<ReportBean> filterByFunctions(String[] functionClassNames,
            List<ReportBean> reports) {
        NanoparticleSampleService sampleService = new NanoparticleSampleService();
        if (functionClassNames != null && functionClassNames.length > 0) {
            List<ReportBean> filteredList = new ArrayList<ReportBean>();
            for (ReportBean report : reports) {
                SortedSet<String> storedFunctions = new TreeSet<String>();
                for (NanoparticleSample particle : ((Report) report
                        .getDomainFile()).getNanoparticleSampleCollection()) {
                    storedFunctions.addAll(sampleService
                            .getStoredFunctionClassNames(particle));
                }
                for (String func : functionClassNames) {
                    // if at least one function type matches, keep the report
                    if (storedFunctions.contains(func)) {
                        filteredList.add(report);
                        break;
                    }
                }
            }
            return filteredList;
        } else {
            return reports;
        }
    }

    /**
     * Filters reports by nanoparticle-entity and functionalizing-entity class
     * names; returns the intersection favoring the smaller non-empty list.
     */
    private List<ReportBean> filterByCompositions(
            String[] nanoparticleEntityClassNames,
            String[] functionalizingEntityClassNames, List<ReportBean> reports) {
        NanoparticleSampleService sampleService = new NanoparticleSampleService();
        List<ReportBean> filteredList1 = new ArrayList<ReportBean>();
        if (nanoparticleEntityClassNames != null
                && nanoparticleEntityClassNames.length > 0) {
            for (ReportBean report : reports) {
                SortedSet<String> storedEntities = new TreeSet<String>();
                for (NanoparticleSample particle : ((Report) report
                        .getDomainFile()).getNanoparticleSampleCollection()) {
                    storedEntities.addAll(sampleService
                            .getStoredNanoparticleEntityClassNames(particle));
                }
                for (String entity : nanoparticleEntityClassNames) {
                    // if at least one function type matches, keep the report
                    if (storedEntities.contains(entity)) {
                        filteredList1.add(report);
                        break;
                    }
                }
            }
        } else {
            filteredList1 = reports;
        }
        List<ReportBean> filteredList2 = new ArrayList<ReportBean>();
        if (functionalizingEntityClassNames != null
                && functionalizingEntityClassNames.length > 0) {
            for (ReportBean report : reports) {
                SortedSet<String> storedEntities = new TreeSet<String>();
                for (NanoparticleSample particle : ((Report) report
                        .getDomainFile()).getNanoparticleSampleCollection()) {
                    storedEntities.addAll(sampleService
                            .getStoredFunctionalizingEntityClassNames(particle));
                }
                for (String entity : functionalizingEntityClassNames) {
                    // if at least one function type matches, keep the report
                    if (storedEntities.contains(entity)) {
                        filteredList2.add(report);
                        break;
                    }
                }
            }
        } else {
            filteredList2 = reports;
        }
        if (filteredList1.size() >= filteredList2.size()
                && !filteredList2.isEmpty()) {
            filteredList1.retainAll(filteredList2);
            return filteredList1;
        } else {
            if (!filteredList1.isEmpty())
                filteredList2.retainAll(filteredList1);
            return filteredList2;
        }
    }

    /**
     * Loads a single report (with its sample collection) by database id.
     *
     * @param reportId the report id as a string
     * @return the report bean, or null when not found
     */
    public ReportBean findReportById(String reportId) throws ReportException {
        ReportBean reportBean = null;
        try {
            // NOTE(review): 'auth' is never read, but the constructor may perform
            // CSM initialization side effects — left in place deliberately.
            AuthorizationService auth = new AuthorizationService(
                    CaNanoLabConstants.CSM_APP_NAME);
            CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
                    .getApplicationService();
            DetachedCriteria crit = DetachedCriteria.forClass(Report.class)
                    .add(Property.forName("id").eq(new Long(reportId)));
            crit.setFetchMode("nanoparticleSampleCollection", FetchMode.JOIN);
            crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
            List result = appService.query(crit);
            if (!result.isEmpty()) {
                Report report = (Report) result.get(0);
                reportBean = new ReportBean(report);
            }
            return reportBean;
        } catch (Exception e) {
            String err = "Problem finding the report by id: " + reportId;
            logger.error(err, e);
            throw new ReportException(err, e);
        }
    }
}
package org.endeavourhealth.queuereader.routines; import com.google.common.base.Strings; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; import org.apache.commons.csv.CSVRecord; import org.apache.commons.io.FilenameUtils; import org.endeavourhealth.common.utility.FileHelper; import org.endeavourhealth.common.utility.FileInfo; import org.endeavourhealth.common.utility.JsonSerializer; import org.endeavourhealth.core.database.dal.DalProvider; import org.endeavourhealth.core.database.dal.admin.ServiceDalI; import org.endeavourhealth.core.database.dal.admin.SystemHelper; import org.endeavourhealth.core.database.dal.admin.models.Service; import org.endeavourhealth.core.database.dal.audit.ExchangeDalI; import org.endeavourhealth.core.database.dal.audit.models.Exchange; import org.endeavourhealth.core.fhirStorage.ServiceInterfaceEndpoint; import org.endeavourhealth.core.queueing.MessageFormat; import org.endeavourhealth.transform.common.AuditWriter; import org.endeavourhealth.transform.common.ExchangeHelper; import org.endeavourhealth.transform.common.ExchangePayloadFile; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.InputStreamReader; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; public class SD307 extends AbstractRoutine { private static final Logger LOG = LoggerFactory.getLogger(SD307.class); /** * finds affected TPP services for SD307 */ public static void findTppServicesMissingDeltas(boolean verbose, String odsCodeRegex) { LOG.debug("Finding TPP Services Missing Deltas " + odsCodeRegex); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); List<Service> servicesWithGaps = new ArrayList<>(); for (Service service : services) { Map<String, String> tags = service.getTags(); if (tags == null || !tags.containsKey("TPP")) { continue; } if (shouldSkipService(service, odsCodeRegex)) { continue; } 
LOG.debug("Doing " + service); boolean gapFound = findTppServicesMissingDeltasForService(verbose, service); if (gapFound) { servicesWithGaps.add(service); } } LOG.debug("Finished Finding TPP Services Missing Deltas " + odsCodeRegex); LOG.debug("Found " + servicesWithGaps.size() + " services with gaps"); for (Service service: servicesWithGaps) { LOG.debug("" + service); } } catch (Throwable t) { LOG.error("", t); } } private static boolean findTppServicesMissingDeltasForService(boolean verbose, Service service) throws Exception { ServiceInterfaceEndpoint endpoint = SystemHelper.findEndpointForSoftware(service, MessageFormat.TPP_CSV); if (endpoint == null) { LOG.warn("No TPP endpoint found for " + service); return false; } UUID serviceId = service.getId(); UUID systemId = endpoint.getSystemUuid(); String odsCode = service.getLocalId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.debug("Found " + exchanges.size() + " exchanges"); Map<String, Map<String, List<DateRange>>> hmExtractConfiguration = new HashMap<>(); //exchange list is most-recent-first, so go backwards for (int i=exchanges.size()-1; i>=0; i Exchange exchange = exchanges.get(i); //let these be counted /*if (!ExchangeHelper.isAllowRequeueing(exchange)) { continue; }*/ String manifestFilePath = findFilePathInExchange(exchange, "Manifest"); if (Strings.isNullOrEmpty(manifestFilePath)) { attemptFixExchangeMissingManifest(exchange); manifestFilePath = findFilePathInExchange(exchange, "Manifest"); if (Strings.isNullOrEmpty(manifestFilePath)) { LOG.warn("Missing manifest file in exchange " + exchange.getId()); continue; } } InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(manifestFilePath); CSVParser parser = new CSVParser(isr, CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); //FileName,IsDelta,IsReference,DateExtractFrom,DateExtractTo 
//20200705_1707 DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm"); String firstEndStr = null; while (iterator.hasNext()) { CSVRecord record = iterator.next(); String fileName = record.get("FileName"); String isDeltaYN = record.get("IsDelta"); String startStr = record.get("DateExtractFrom"); String endStr = record.get("DateExtractTo"); //the main hash map is keyed by the extract configuration part of the path, //since we have TPP practices that were in multiple SystmOne extract configurations String extractConfiguration = findExtractConfiguration(manifestFilePath); Map<String, List<DateRange>> hmFiles = hmExtractConfiguration.get(extractConfiguration); if (hmFiles == null) { hmFiles = new HashMap<>(); hmExtractConfiguration.put(extractConfiguration, hmFiles); } List<DateRange> list = hmFiles.get(fileName); if (list == null) { list = new ArrayList<>(); hmFiles.put(fileName, list); } //if it's a bulk, clear out what was before boolean isBulk = isDeltaYN.equalsIgnoreCase("N"); if (isBulk) { list.clear(); } DateRange r = new DateRange(); r.setManifestFilePath(manifestFilePath); r.setBulk(isBulk); r.setExchangeId(exchange.getId()); r.setFromStr(startStr); r.setToStr(endStr); if (!Strings.isNullOrEmpty(startStr)) { r.setFrom(dateFormat.parse(startStr)); } if (!Strings.isNullOrEmpty(endStr)) { r.setTo(dateFormat.parse(endStr)); } list.add(r); //why would any record have an empty start if (Strings.isNullOrEmpty(startStr) && Strings.isNullOrEmpty(endStr)) { //there's something odd about this file and it seems to have null dates in every manifest, so just ignore it if (!fileName.equals("SRAppointmentAttendees")) { LOG.warn("NULL start and end date for " + fileName + " in " + manifestFilePath); } } else if (Strings.isNullOrEmpty(startStr)) { //null start is OK if we're a bulk if (!isBulk) { LOG.warn("NULL start date for " + fileName + " in " + manifestFilePath); } } else if (Strings.isNullOrEmpty(endStr)) { LOG.warn("NULL end date for " + fileName + " in " + 
manifestFilePath); } if (!Strings.isNullOrEmpty(endStr)) { //verify if the end date is always the same for records if (firstEndStr == null) { firstEndStr = endStr; } else if (!firstEndStr.equalsIgnoreCase(endStr)) { LOG.error("Got multiple distinct end dates " + firstEndStr + " vs " + endStr + " in " + manifestFilePath); } } } parser.close(); } DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm"); boolean gapFound = false; if (hmExtractConfiguration.size() > 1) { LOG.error("Service is in MULTIPLE extract configurations"); } else { LOG.debug("Service is in ONE extract configurations"); } for (String extractConfiguration: hmExtractConfiguration.keySet()) { LOG.debug("Extract configuration " + extractConfiguration + " ==================================================================================================================================="); Map<String, List<DateRange>> hmFiles = hmExtractConfiguration.get(extractConfiguration); //LOG.debug("Cached " + hmFiles.size() + " file metadata, checking..."); List<String> fileNames = new ArrayList<>(hmFiles.keySet()); fileNames.sort((a, b) -> a.compareToIgnoreCase(b)); for (String fileName : fileNames) { List<DateRange> list = hmFiles.get(fileName); //don't care about files we don't process if (!getTransformedFileNames().contains(fileName)) { continue; } LOG.debug("Checking " + fileName + " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); /*if (verbose) { LOG.debug("Checking " + fileName + " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); }*/ DateRange lastDateRange = list.get(0); if (verbose) { LOG.debug(" " + lastDateRange); } for (int i = 1; i < list.size(); i++) { DateRange dateRange = list.get(i); Date previousStart = lastDateRange.getFrom(); Date previousEnd = lastDateRange.getTo(); Date currentStart = dateRange.getFrom(); Date currentEnd = dateRange.getTo(); if (verbose) { LOG.debug(" " + 
dateRange); } //if the start and end don't match up, then something is off if (currentStart == null) { LOG.warn(" NULL START DATE: " + odsCode + " " + extractConfiguration + "::" + fileName + " exchange " + dateRange.getExchangeId()); } else if (currentStart.equals(previousEnd)) { } else if (previousStart != null && currentStart.equals(previousStart) && currentEnd.equals(previousEnd)) { LOG.warn(" DUPLICATE FOUND: " + odsCode + " " + extractConfiguration + "::" + fileName + " exchange " + dateRange.getExchangeId() + " has range " + dateFormat.format(currentStart) + " - " + dateFormat.format(currentEnd) + " which is the same as previous"); } else if (currentStart.after(previousEnd)) { LOG.error(" GAP FOUND: " + odsCode + " " + extractConfiguration + "::" + fileName + " exchange " + dateRange.getExchangeId() + " expecting start " + dateFormat.format(previousEnd) + " but got " + dateFormat.format(currentStart)); gapFound = true; } else { LOG.warn(" GONE BACK: " + odsCode + " " + extractConfiguration + "::" + fileName + " exchange " + dateRange.getExchangeId() + " expecting start " + dateFormat.format(previousEnd) + " but got " + dateFormat.format(currentStart)); } lastDateRange = dateRange; } } } return gapFound; } private static Set<String> getTransformedFileNames() { Set<String> ret = new HashSet<>(); ret.add("SRCcg"); ret.add("SROrganisationBranch"); ret.add("SROrganisation"); ret.add("SRTrust"); ret.add("SRAppointmentFlags"); ret.add("SRAppointment"); ret.add("SRRota"); ret.add("SRVisit"); ret.add("SRChildAtRisk"); ret.add("SRCode"); ret.add("SRDrugSensitivity"); ret.add("SREvent"); ret.add("SRImmunisation"); ret.add("SRPersonAtRisk"); ret.add("SRProblem"); ret.add("SRRecall"); ret.add("SRReferralOut"); ret.add("SRRepeatTemplate"); ret.add("SRSpecialNotes"); ret.add("SRConfiguredListOption"); ret.add("SRCtv3"); ret.add("SRCtv3ToSnomed"); ret.add("SRImmunisationContent"); ret.add("SRMapping"); ret.add("SRMedicationReadCodeDetails"); return ret; } /** * extracts the 
element of the path that indicates the extract configuration it came from * e.g. * from * S3/discoverysftplanding/endeavour/sftpReader/TPP/YDDH3_08Y/2021-01-01T04.03.00/Split/E87711/SRManifest.csv * find * YDDH3_08Y */ private static String findExtractConfiguration(String filePath) { File f = new File(filePath); f = f.getParentFile(); //S3/discoverysftplanding/endeavour/sftpReader/TPP/YDDH3_08Y/2021-01-01T04.03.00/Split/E87711 f = f.getParentFile(); //S3/discoverysftplanding/endeavour/sftpReader/TPP/YDDH3_08Y/2021-01-01T04.03.00/Split f = f.getParentFile(); //S3/discoverysftplanding/endeavour/sftpReader/TPP/YDDH3_08Y/2021-01-01T04.03.00/ f = f.getParentFile(); //S3/discoverysftplanding/endeavour/sftpReader/TPP/YDDH3_08Y/ return f.getName(); } /** * on older exchanges, we didn't used to copy the SRManifest.csv file into the service-specific "Split" directory. * The SRManfiest files were copied over a while back, but the Exchange bodies weren't updated accordingly */ private static void attemptFixExchangeMissingManifest(Exchange exchange) throws Exception { String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (files.isEmpty()) { return; } LOG.debug("Attempting to fix Exchange " + exchange.getId() + " without SRManifest in body"); ExchangePayloadFile first = files.get(0); String firstPath = first.getPath(); String dir = FilenameUtils.getFullPath(firstPath); LOG.debug("Getting listing of " + dir); List<FileInfo> listing = FileHelper.listFilesInSharedStorageWithInfo(dir); for (FileInfo info: listing) { String path = info.getFilePath(); if (path.endsWith("SRManifest.csv")) { LOG.debug("Found manifest file at " + path); //get the file list again, but this time without the storage prefix, so we can use to populate the exchange body from it List<ExchangePayloadFile> filesNoPrefix = ExchangeHelper.parseExchangeBody(exchangeBody, false); first = filesNoPrefix.get(0); firstPath = first.getPath(); dir = 
FilenameUtils.getFullPath(firstPath); String newPath = FilenameUtils.concat(dir, "SRManifest.csv"); LOG.debug("Adding path to body " + newPath); ExchangePayloadFile f = new ExchangePayloadFile(); f.setPath(newPath); f.setSize(new Long(info.getSize())); f.setType("Manifest"); filesNoPrefix.add(f); String json = JsonSerializer.serialize(filesNoPrefix); //LOG.debug("New body: " + json); exchange.setBody(json); //fix on the DB AuditWriter.writeExchange(exchange); return; } } } private static class DateRange { private boolean isBulk; private String fromStr; private String toStr; private Date from; private Date to; private UUID exchangeId; private String manifestFilePath; public boolean isBulk() { return isBulk; } public void setBulk(boolean bulk) { isBulk = bulk; } public String getFromStr() { return fromStr; } public void setFromStr(String fromStr) { this.fromStr = fromStr; } public String getToStr() { return toStr; } public void setToStr(String toStr) { this.toStr = toStr; } public Date getFrom() { return from; } public void setFrom(Date from) { this.from = from; } public Date getTo() { return to; } public void setTo(Date to) { this.to = to; } public UUID getExchangeId() { return exchangeId; } public void setExchangeId(UUID exchangeId) { this.exchangeId = exchangeId; } public String getManifestFilePath() { return manifestFilePath; } public void setManifestFilePath(String manifestFilePath) { this.manifestFilePath = manifestFilePath; } @Override public String toString() { StringBuffer sb = new StringBuffer(); if (fromStr == null) { sb.append("NULL"); } else { sb.append(fromStr); } sb.append(" -> "); if (toStr == null) { sb.append("NULL"); } else { sb.append(toStr); } sb.append(" (exchange " + exchangeId + ")"); if (isBulk) { sb.append(" BULK"); } sb.append(" " + manifestFilePath); return sb.toString(); } } }
package org.rstudio.studio.client.panmirror.dialogs; import org.rstudio.core.client.ElementIds; import org.rstudio.core.client.StringUtil; import org.rstudio.core.client.dom.DomUtils; import org.rstudio.core.client.files.FileSystemItem; import org.rstudio.core.client.theme.DialogTabLayoutPanel; import org.rstudio.core.client.theme.VerticalTabPanel; import org.rstudio.core.client.widget.FormLabel; import org.rstudio.core.client.widget.ModalDialog; import org.rstudio.core.client.widget.NumericTextBox; import org.rstudio.core.client.widget.OperationWithInput; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.GlobalDisplay; import org.rstudio.studio.client.panmirror.dialogs.model.PanmirrorAttrProps; import org.rstudio.studio.client.panmirror.dialogs.model.PanmirrorImageDimensions; import org.rstudio.studio.client.panmirror.dialogs.model.PanmirrorImageProps; import org.rstudio.studio.client.panmirror.uitools.PanmirrorUITools; import org.rstudio.studio.client.panmirror.uitools.PanmirrorUIToolsImage; import com.google.gwt.aria.client.Roles; import com.google.gwt.dom.client.Document; import com.google.gwt.event.dom.client.DomEvent; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.HasVerticalAlignment; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.ListBox; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.TextBox; import com.google.gwt.user.client.ui.Widget; public class PanmirrorEditImageDialog extends ModalDialog<PanmirrorImageProps> { public PanmirrorEditImageDialog(PanmirrorImageProps props, PanmirrorImageDimensions dims, String resourceDir, boolean editAttributes, OperationWithInput<PanmirrorImageProps> operation) { super("Image", Roles.getDialogRole(), operation, () -> { // cancel returns null operation.execute(null); }); // natural width, height, and containerWidth (will be null if this // is an insert image dialog) 
dims_ = dims; // size props that we are going to reflect back to the caller. the idea is that // if the user makes no explicit edits of size props then we just return // exactly what we were passed. this allows us to show a width and height // for images that are 'unsized' (i.e. just use natural height and width). the // in-editor resizing shelf implements the same behavior. widthProp_ = props.width; heightProp_ = props.height; unitsProp_ = props.units; // image tab VerticalTabPanel imageTab = new VerticalTabPanel(ElementIds.VISUAL_MD_IMAGE_TAB_IMAGE); imageTab.addStyleName(RES.styles().dialog()); // panel for size controls (won't be added if this is an insert or !editAttributes) HorizontalPanel sizePanel = new HorizontalPanel(); sizePanel.addStyleName(RES.styles().spaced()); sizePanel.setVerticalAlignment(HasVerticalAlignment.ALIGN_MIDDLE); // image url picker imageTab.add(url_ = new PanmirrorImageChooser(FileSystemItem.createDir(resourceDir))); url_.addStyleName(RES.styles().spaced()); if (!StringUtil.isNullOrEmpty(props.src)) url_.setText(props.src); // when the url is changed we no longer know the image dimensions. in this case // just wipe out those props and remove the image sizing ui. note that immediately // after insert the size controls will appear at the bottom of the image. 
url_.addValueChangeHandler(value -> { widthProp_ = null; heightProp_ = null; unitsProp_ = null; dims_ = null; imageTab.remove(sizePanel); }); // width, height, units width_ = addSizeInput(sizePanel, ElementIds.VISUAL_MD_IMAGE_WIDTH, "Width:"); height_ = addSizeInput(sizePanel, ElementIds.VISUAL_MD_IMAGE_HEIGHT, "Height:"); heightAuto_ = createHorizontalLabel("(Auto)"); heightAuto_.addStyleName(RES.styles().heightAuto()); sizePanel.add(heightAuto_); units_ = addUnitsSelect(sizePanel); initSizeInputs(); // lock ratio lockRatio_ = new CheckBox("Lock ratio"); lockRatio_.addStyleName(RES.styles().lockRatioCheckbox()); lockRatio_.getElement().setId(ElementIds.VISUAL_MD_IMAGE_LOCK_RATIO); lockRatio_.setValue(props.lockRatio); sizePanel.add(lockRatio_); // update widthProp_ and height (if lockRatio) when width text box changes width_.addChangeHandler(event -> { String width = width_.getText(); widthProp_ = StringUtil.isNullOrEmpty(width) ? null : Double.parseDouble(width); if (widthProp_ != null && lockRatio_.getValue()) { double height = widthProp_ * (dims_.naturalHeight/dims_.naturalWidth); height_.setValue(uiTools_.roundUnit(height, units_.getSelectedValue())); heightProp_ = Double.parseDouble(height_.getValue()); } unitsProp_ = units_.getSelectedValue(); }); // update heightProp_ and width (if lockRatio) when height text box changes height_.addChangeHandler(event -> { String height = height_.getText(); heightProp_ = StringUtil.isNullOrEmpty(height) ? 
null : Double.parseDouble(height); if (heightProp_ != null && lockRatio_.getValue()) { double width = heightProp_ * (dims_.naturalWidth/dims_.naturalHeight); width_.setValue(uiTools_.roundUnit(width, units_.getSelectedValue())); widthProp_ = Double.parseDouble(width_.getValue()); } unitsProp_ = units_.getSelectedValue(); }); // do applicable unit conversion when units change units_.addChangeHandler(event -> { String width = width_.getText(); if (!StringUtil.isNullOrEmpty(width)) { double widthPixels = uiTools_.unitToPixels(Double.parseDouble(width), prevUnits_, dims_.containerWidth); double widthUnit = uiTools_.pixelsToUnit(widthPixels, units_.getSelectedValue(), dims_.containerWidth); width_.setText(uiTools_.roundUnit(widthUnit, units_.getSelectedValue())); widthProp_ = Double.parseDouble(width_.getValue()); } String height = height_.getText(); if (!StringUtil.isNullOrEmpty(height)) { double heightPixels = uiTools_.unitToPixels(Double.parseDouble(height), prevUnits_, dims_.containerWidth); double heightUnit = uiTools_.pixelsToUnit(heightPixels, units_.getSelectedValue(), dims_.containerWidth); height_.setText(uiTools_.roundUnit(heightUnit, units_.getSelectedValue())); heightProp_ = Double.parseDouble(height_.getValue()); } // track previous units for subsequent conversions prevUnits_ = units_.getSelectedValue(); // save units prop unitsProp_ = units_.getSelectedValue(); manageUnitsUI(); }); manageUnitsUI(); // only add sizing controls if we support editAttributes, dims have been provided // (i.e. not an insert operation) and there aren't width or height attributes // within props.keyvalue (which is an indicator that they use units unsupported // by our sizing UI (e.g. ch, em, etc.) 
if (editAttributes && dims_ != null && !hasSizeKeyvalue(props.keyvalue)) { imageTab.add(sizePanel); } // title and alt title_ = PanmirrorDialogsUtil.addTextBox(imageTab, ElementIds.VISUAL_MD_IMAGE_TITLE, "Title/Tooltip:", props.title); alt_ = PanmirrorDialogsUtil.addTextBox(imageTab, ElementIds.VISUAL_MD_IMAGE_ALT, "Caption/Alt:", props.alt); // standard pandoc attributes editAttr_ = new PanmirrorEditAttrWidget(); editAttr_.setAttr(props); if (editAttributes) { VerticalTabPanel attributesTab = new VerticalTabPanel(ElementIds.VISUAL_MD_IMAGE_TAB_ATTRIBUTES); attributesTab.addStyleName(RES.styles().dialog()); attributesTab.add(editAttr_); DialogTabLayoutPanel tabPanel = new DialogTabLayoutPanel("Image"); tabPanel.addStyleName(RES.styles().imageDialogTabs()); tabPanel.add(imageTab, "Image", imageTab.getBasePanelId()); tabPanel.add(attributesTab, "Attributes", attributesTab.getBasePanelId()); tabPanel.selectTab(0); mainWidget_ = tabPanel; } else { mainWidget_ = imageTab; } } @Override protected Widget createMainWidget() { return mainWidget_; } @Override public void focusFirstControl() { url_.getTextBox().setFocus(true); url_.getTextBox().setSelectionRange(0, 0); } @Override protected PanmirrorImageProps collectInput() { // process change event for focused size controls (typically these changes // only occur on the change event, which won't occur if the dialog is // dismissed while they are focused fireChangedIfFocused(width_); fireChangedIfFocused(height_); // collect and return result PanmirrorImageProps result = new PanmirrorImageProps(); result.src = url_.getTextBox().getValue().trim(); result.title = title_.getValue().trim(); result.alt = alt_.getValue().trim(); result.width = widthProp_; result.height = heightProp_; result.units = unitsProp_; result.lockRatio = lockRatio_.getValue(); PanmirrorAttrProps attr = editAttr_.getAttr(); result.id = attr.id; result.classes = attr.classes; result.keyvalue = attr.keyvalue; return result; } @Override protected boolean 
validate(PanmirrorImageProps result) { // width is required if height is specified if (height_.getText().trim().length() > 0) { GlobalDisplay globalDisplay = RStudioGinjector.INSTANCE.getGlobalDisplay(); String width = width_.getText().trim(); if (width.length() == 0) { globalDisplay.showErrorMessage( "Error", "You must provide a value for image width." ); width_.setFocus(true); return false; } else { return true; } } else { return true; } } // set sizing UI based on passed width, height, and unit props. note that // these can be null (default/natural sizing) and in that case we still // want to dispaly pixel sizing in the UI as an FYI to the user private void initSizeInputs() { // only init for existing images (i.e. dims passed) if (dims_ == null) return; String width = null, height = null, units = "px"; // if we have both width and height then use them if (widthProp_ != null && heightProp_ != null) { width = widthProp_.toString(); height = heightProp_.toString(); units = unitsProp_; } // if there is no width or height, use pixels else if (widthProp_ == null && heightProp_ == null) { width = dims_.naturalWidth.toString(); height = dims_.naturalHeight.toString(); units = "px"; } else if (dims_.naturalHeight != null && dims_.naturalWidth != null) { // if there is width only then show computed height units = unitsProp_; if (widthProp_ != null) { width = widthProp_.toString(); height = uiTools_.roundUnit(widthProp_ * (dims_.naturalHeight/dims_.naturalWidth), units); } else if (heightProp_ != null) { height = heightProp_.toString(); width = uiTools_.roundUnit(heightProp_ * (dims_.naturalWidth/dims_.naturalHeight), units); } } // set values into inputs width_.setValue(width); height_.setValue(height); for (int i = 0; i<units_.getItemCount(); i++) { if (units_.getItemText(i) == units) { units_.setSelectedIndex(i); // track previous units for conversions prevUnits_ = units; break; } } } // show/hide controls and enable/disable lockUnits depending on // whether we are 
using percent sizing private void manageUnitsUI() { boolean percentUnits = units_.getSelectedValue() == uiTools_.percentUnit(); if (percentUnits) { lockRatio_.setValue(true); lockRatio_.setEnabled(false); } else { lockRatio_.setEnabled(true); } height_.setVisible(!percentUnits); heightAuto_.setVisible(percentUnits); } // create a numeric input private static NumericTextBox addSizeInput(Panel panel, String id, String labelText) { FormLabel label = createHorizontalLabel(labelText); NumericTextBox input = new NumericTextBox(); input.setMin(1); input.setMax(10000); input.addStyleName(RES.styles().horizontalInput()); input.getElement().setId(id); label.setFor(input); panel.add(label); panel.add(input); return input; } // create units select list box private ListBox addUnitsSelect(Panel panel) { String[] options = uiTools_.validUnits(); ListBox units = new ListBox(); units.addStyleName(RES.styles().horizontalInput()); for (int i = 0; i < options.length; i++) units.addItem(options[i], options[i]); units.getElement().setId(ElementIds.VISUAL_MD_IMAGE_UNITS); panel.add(units); return units; } // create a horizontal label private static FormLabel createHorizontalLabel(String text) { FormLabel label = new FormLabel(text); label.addStyleName(RES.styles().horizontalLabel()); return label; } // fire a change event if the widget is currently focused private static void fireChangedIfFocused(Widget widget) { if (widget.getElement() == DomUtils.getActiveElement()) DomEvent.fireNativeEvent(Document.get().createChangeEvent(), widget); } // check whether the passed keyvalue attributes has a size (width or height) private static boolean hasSizeKeyvalue(String[][] keyvalue) { for (int i=0; i<keyvalue.length; i++) { String key = keyvalue[i][0]; if (key.equalsIgnoreCase(WIDTH) || key.equalsIgnoreCase(HEIGHT)) return true; } return false; } // resources private static PanmirrorDialogsResources RES = PanmirrorDialogsResources.INSTANCE; // UI utility functions from panmirror private final 
PanmirrorUIToolsImage uiTools_ = new PanmirrorUITools().image; // original image/container dimensions private PanmirrorImageDimensions dims_; // current 'edited' values for size props private Double widthProp_ = null; private Double heightProp_ = null; private String unitsProp_ = null; // track previous units for conversions private String prevUnits_; // widgets private final Widget mainWidget_; private final PanmirrorImageChooser url_; private final NumericTextBox width_; private final NumericTextBox height_; private final FormLabel heightAuto_; private final ListBox units_; private final CheckBox lockRatio_; private final TextBox title_; private final TextBox alt_; private final PanmirrorEditAttrWidget editAttr_; private static final String WIDTH = "width"; private static final String HEIGHT = "height"; }
/** * This is only generated once! It will never be overwritten. * You can (and have to!) safely modify it by hand. */ package edu.columbia.gemma.common.description; import java.util.Collection; /** * @see edu.columbia.gemma.common.description.BibliographicReferenceService */ public class BibliographicReferenceServiceImpl extends edu.columbia.gemma.common.description.BibliographicReferenceServiceBase { /** * @see edu.columbia.gemma.common.description.BibliographicReferenceService#getAllBibliographicReferences() */ protected java.util.Collection handleGetAllBibliographicReferences() throws java.lang.Exception { //@todo implement protected java.util.Collection handleGetAllBibliographicReferences() return null; } /** * @see edu.columbia.gemma.common.description.BibliographicReferenceService#saveBibliographicReference(edu.columbia.gemma.common.description.BibliographicReference) */ protected void handleSaveBibliographicReference(edu.columbia.gemma.common.description.BibliographicReference BibliographicReference) throws java.lang.Exception { Collection col = getBibliographicReferenceDao().findByTitle(BibliographicReference.getTitle()); if (col.size()==0) getBibliographicReferenceDao().create(BibliographicReference); } /** * @see edu.columbia.gemma.common.description.BibliographicReferenceService#findByExternalId(java.lang.String) */ protected edu.columbia.gemma.common.description.BibliographicReference handleFindByExternalId(java.lang.String id) throws java.lang.Exception { //@todo implement protected edu.columbia.gemma.common.description.BibliographicReference handleFindByExternalId(java.lang.String id) return null; } /** * @see edu.columbia.gemma.common.description.BibliographicReferenceService#findByExternalId(java.lang.String, java.lang.String) */ protected edu.columbia.gemma.common.description.BibliographicReference handleFindByExternalId(java.lang.String id, java.lang.String databaseName) throws java.lang.Exception { //@todo implement protected 
edu.columbia.gemma.common.description.BibliographicReference handleFindByExternalId(java.lang.String id, java.lang.String databaseName) return null; } }
package localsearch.domainspecific.vehiclerouting.apps.sharedaride.Search;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;
import java.util.logging.Level;
import localsearch.domainspecific.vehiclerouting.apps.sharedaride.ShareARide;
import localsearch.domainspecific.vehiclerouting.apps.sharedaride.SolutionShareARide;
import localsearch.domainspecific.vehiclerouting.vrp.ConstraintSystemVR;
import localsearch.domainspecific.vehiclerouting.vrp.IFunctionVR;
import localsearch.domainspecific.vehiclerouting.vrp.VRManager;
import localsearch.domainspecific.vehiclerouting.vrp.VarRoutesVR;
import localsearch.domainspecific.vehiclerouting.vrp.entities.ArcWeightsManager;
import localsearch.domainspecific.vehiclerouting.vrp.entities.Point;
import localsearch.domainspecific.vehiclerouting.vrp.invariants.EarliestArrivalTimeVR;

/**
 * Adaptive Large Neighborhood Search (ALNS) combined with Simulated Annealing
 * for the Share-A-Ride problem. Each iteration destroys part of the current
 * solution with one of 7 removal operators, repairs it with one of 6 insertion
 * operators, scores the operators by how well they did, and periodically
 * re-weights the roulette-wheel selection probabilities from those scores.
 * Worsening moves are accepted with the usual SA probability
 * exp(-(new_cost - current_cost)/temperature).
 *
 * NOTE(review): this class continues beyond the visible chunk of the file
 * (e.g. get_operator(..) used below is defined further down), so the comments
 * here describe only what is visible.
 */
public class ALNSwithSA {
    private VRManager mgr;
    private VarRoutesVR XR;            // route variables of the current solution
    private ConstraintSystemVR S;      // feasibility checks for add/remove moves
    private IFunctionVR objective;     // cost function being minimized
    private EarliestArrivalTimeVR eat; // earliest-arrival-time invariant over XR
    private ArcWeightsManager awm;     // arc weights (used here as distances)
    private int nRemovalOperators=7;
    private int nInsertionOperators=6;
    //parameters
    // number of requests removed per destroy step is drawn uniformly from
    // [lower_removal, upper_removal] = [1%, 10%] of the total request count
    private int lower_removal = (int) 1*(ShareARide.nRequest)/100;
    private int upper_removal = (int) 10*(ShareARide.nRequest)/100;
    // operator scores: sigma1 for a new global best, sigma2 for improving the
    // current solution, sigma3 otherwise
    private int sigma1 = 5;
    private int sigma2 = 3;
    private int sigma3 = 1;
    private double rp = 0.1;  // reaction factor when re-weighting probabilities
    private int nw = 5;       // re-weight the probabilities every nw iterations
    // weights of the relatedness terms used by shaw/proximity/time removal
    private double shaw1st = 0.5;
    private double shaw2nd = 0.2;
    private double shaw3rd = 0.1;
    private double temperature = 200;     // SA temperature
    private double cooling_rate = 0.9995; // geometric cooling per iteration
    //private double shaw4th = 0.2;

    public ALNSwithSA(VRManager mgr, IFunctionVR objective, ConstraintSystemVR S, EarliestArrivalTimeVR eat, ArcWeightsManager awm){
        this.mgr = mgr;
        this.objective = objective;
        this.XR = mgr.getVarRoutesVR();
        this.S = S;
        this.eat = eat;
        this.awm = awm;
    }

    /**
     * Runs the ALNS/SA main loop and returns the best solution found.
     *
     * @param maxIter   maximum number of iterations
     * @param timeLimit time budget, compared against a
     *                  System.currentTimeMillis() delta, so it appears to be
     *                  in milliseconds — TODO confirm with callers
     */
    public SolutionShareARide search(int maxIter, int timeLimit){
        //insertion operators selection probabilities
        double[] pti = new double[nInsertionOperators];
        //removal operators selection probabilities
        double[] ptd = new double[nRemovalOperators];
        //wi - number of times used during last iteration
        int[] wi = new int[nInsertionOperators];
        int[] wd = new int[nRemovalOperators];
        //pi_i - score of operator
        int[] si = new int[nInsertionOperators];
        int[] sd = new int[nRemovalOperators];
        //init probabilities (uniform), usage counts and scores
        for(int i=0; i<nInsertionOperators; i++){
            pti[i] = 1.0/nInsertionOperators;
            wi[i] = 1;
            si[i] = 0;
        }
        for(int i=0; i<nRemovalOperators; i++){
            ptd[i] = 1.0/nRemovalOperators;
            wd[i] = 1;
            sd[i] = 0;
        }
        int it = 0;
        double best_cost = objective.getValue();
        SolutionShareARide best_solution = new SolutionShareARide(XR, ShareARide.rejectPoints, ShareARide.rejectPickupGoods, ShareARide.rejectPickupPeoples, best_cost);
        ShareARide.LOGGER.log(Level.INFO, "start search best_solution has cost = "+best_solution.get_cost()+" number of rejected request of goods = "+best_solution.get_rejectPickupGoods().size()+" number of rejected request of peoples = "+best_solution.get_rejectPickupPeoples().size());
        double start_search_time = System.currentTimeMillis();
        double search_time = 0.0;
        while( search_time < timeLimit && it++ < maxIter){
            double current_cost = objective.getValue();
            SolutionShareARide current_solution = new SolutionShareARide(XR, ShareARide.rejectPoints, ShareARide.rejectPickupGoods, ShareARide.rejectPickupPeoples, current_cost);
            ShareARide.LOGGER.log(Level.INFO, "Iter "+it+" current_solution has cost = "+current_solution.get_cost()+" number of rejected request of goods = "+current_solution.get_rejectPickupGoods().size()+" number of rejected request of peoples = "+current_solution.get_rejectPickupPeoples().size());
            // Select a removal (destroy) operator by roulette wheel over ptd.
            int i_selected_removal = get_operator(ptd);
            wd[i_selected_removal]++;
            /*
             * Select remove operator
             */
            ShareARide.LOGGER.log(Level.INFO,"selected removal operator = "+i_selected_removal);
            //long timeRemoveStart = System.currentTimeMillis();
            switch(i_selected_removal){
                case 0: random_removal(); break;
                case 1: route_removal(); break;
                case 2: late_arrival_removal(); break;
                case 3: shaw_removal(); break;
                case 4: proximity_based_removal(); break;
                case 5: time_based_removal(); break;
                case 6: worst_removal(); break;
            }
            //long timeRemoveEnd = System.currentTimeMillis();
            //long timeRemove = timeRemoveEnd - timeRemoveStart;
            // Select an insertion (repair) operator by roulette wheel over pti.
            int i_selected_insertion = get_operator(pti);
            wi[i_selected_insertion]++;
            ShareARide.LOGGER.log(Level.INFO,"selected insertion operator = "+i_selected_insertion);
            /*
             * Select insertion operator
             */
            //long timeInsertStart = Sy
            switch(i_selected_insertion){
                case 0: greedy_insertion(); break;
                case 1: greedy_insertion_noise_function(); break;
                case 2: second_best_insertion(); break;
                case 3: second_best_insertion_noise_function(); break;
                case 4: regret_n_insertion(2); break;
                case 5: regret_n_insertion(3); break;
            }
            double new_cost = objective.getValue();
            ShareARide.LOGGER.log(Level.INFO,"Iter "+it+" new_solution: has cost = "+new_cost+" number of rejected request of goods = "+ShareARide.rejectPickupGoods.size()+" number of rejected request of peoples = "+ShareARide.rejectPickupPeoples.size());
            /*
             * Acceptance: fewer rejected requests wins first; cost breaks ties.
             * If the new solution beats the current one, also compare against
             * the global best and reward the two operators accordingly.
             */
            int new_nb_reject_points = ShareARide.rejectPickupGoods.size()+ShareARide.rejectPickupPeoples.size();
            int current_nb_reject_points = current_solution.get_rejectPickupGoods().size() + current_solution.get_rejectPickupPeoples().size();
            if( new_nb_reject_points < current_nb_reject_points || (new_nb_reject_points == current_nb_reject_points && new_cost < current_cost)){
                int best_nb_reject_points = best_solution.get_rejectPickupGoods().size()+best_solution.get_rejectPickupPeoples().size();
                if(new_nb_reject_points < best_nb_reject_points || (new_nb_reject_points == best_nb_reject_points && new_cost < best_cost)){
                    best_cost = new_cost;
                    best_solution = new SolutionShareARide(XR, ShareARide.rejectPoints, ShareARide.rejectPickupGoods, ShareARide.rejectPickupPeoples, best_cost);
                    ShareARide.LOGGER.log(Level.INFO,"Iter "+it+" find the best solution with number of rejected of goods = "+best_solution.get_rejectPickupGoods().size()+" number of rejected of peoples = "+best_solution.get_rejectPickupPeoples().size()+" cost = "+best_solution.get_cost());
                    si[i_selected_insertion] += sigma1;
                    sd[i_selected_removal] += sigma1;
                }else{
                    si[i_selected_insertion] += sigma2;
                    sd[i_selected_removal] += sigma2;
                }
            }
            /*
             * New solution is worse than the current one (XR currently holds
             * the new solution). Accept it anyway with SA probability
             * exp(-(new_cost-current_cost)/temperature); otherwise copy the
             * current solution back into XR and restore the reject lists.
             */
            else{
                si[i_selected_insertion] += sigma3;
                sd[i_selected_removal] += sigma3;
                double v = Math.exp(-(new_cost-current_cost)/temperature);
                double r = Math.random();
                if(r >= v){
                    ShareARide.LOGGER.log(Level.INFO,"The cost did not improve and reverse solution back to current solution");
                    current_solution.copy2XR(XR);
                    ShareARide.rejectPoints = current_solution.get_rejectPoints();
                    ShareARide.rejectPickupGoods = current_solution.get_rejectPickupGoods();
                    ShareARide.rejectPickupPeoples = current_solution.get_rejectPickupPeoples();
                }
            }
            temperature = cooling_rate*temperature;
            //update probabilities every nw iterations.
            // NOTE(review): the usage counts (wi/wd) and scores (si/sd) are
            // intentionally NOT reset here (the resets are commented out), so
            // scores accumulate over the whole run.
            if(it % nw == 0){
                for(int i=0; i<nInsertionOperators; i++){
                    pti[i] = pti[i]*(1-rp) + rp*si[i]/wi[i];
                    //wi[i] = 1;
                    //si[i] = 0;
                }
                for(int i=0; i<nRemovalOperators; i++){
                    ptd[i] = ptd[i]*(1-rp) + rp*sd[i]/wd[i];
                    //wd[i] = 1;
                    //sd[i] = 0;
                }
            }
            search_time = System.currentTimeMillis() - start_search_time;
        }
        return best_solution;
    }

    /**
     * Removal operator 0 (random removal): removes a random number (in
     * [lower_removal, upper_removal]) of randomly chosen requests — each
     * request being its (pickup, delivery) point pair — and records them in
     * the reject lists.
     */
    private void random_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) + lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        ArrayList<Point> clientPoints = XR.getClientPoints();
        Collections.shuffle(clientPoints);
        int inRemove = 0 ;
        for(int i=0; i<clientPoints.size(); i++){
            if(inRemove == nRemove) break;
            Point pr1 = clientPoints.get(i);
            if(ShareARide.rejectPoints.contains(pr1)) continue;
            //out.println("pr1 = "+pr1.getID());
            // pr1 may be either a pickup or a delivery: resolve its partner pr2.
            Point pr2 = ShareARide.pickup2Delivery.get(pr1);
            boolean pr2IsDelivery = true;
            if(pr2 == null){
                //out.println("pr2 null");
                pr2 = ShareARide.delivery2Pickup.get(pr1);
                pr2IsDelivery = false;
            }
            //System.out.println("pr2 = "+pr2.getID());
            /*if(S.evaluateRemoveTwoPoints(pr1,pr2) != 0){
                System.out.println("iter "+i+" invalid "+pr1.getID()+" "+pr2.getID());
                continue;
            }*/
            //System.out.println("iter "+i+" Remove "+pr1.getID()+" "+pr2.getID());
            inRemove++;
            // Always remove in (pickup, delivery) order.
            if(pr2IsDelivery){
                mgr.performRemoveTwoPoints(pr1, pr2);
            }else{
                mgr.performRemoveTwoPoints(pr2, pr1);
            }
            ShareARide.rejectPoints.add(pr1);
            ShareARide.rejectPoints.add(pr2);
            // Record the removed request's pickup in the matching reject list
            // (people vs goods); an unclassifiable pickup is treated as fatal.
            if(pr2IsDelivery){
                if(ShareARide.pickup2DeliveryOfPeople.containsKey(pr1)){
                    ShareARide.rejectPickupPeoples.add(pr1);
                }else if(ShareARide.pickup2DeliveryOfGood.containsKey(pr1)){
                    ShareARide.rejectPickupGoods.add(pr1);
                }else{
                    ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                    System.exit(-1);
                }
            }else{
                if(ShareARide.pickup2DeliveryOfPeople.containsKey(pr2)){
                    ShareARide.rejectPickupPeoples.add(pr2);
                }else if(ShareARide.pickup2DeliveryOfGood.containsKey(pr2)){
                    ShareARide.rejectPickupGoods.add(pr2);
                }else{
                    ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                    System.exit(-1);
                }
            }
        }
        if(inRemove == 0){
            ShareARide.LOGGER.log(Level.INFO,"Can't remove any client points");
        }
        //System.out.println("random_removal done");
    }

    /**
     * Removal operator 1 (route removal): empties one randomly chosen route,
     * rejecting every request served on it.
     */
    private void route_removal(){
        int K = XR.getNbRoutes();
        Random R = new Random();
        int iRouteRemoval = R.nextInt(K)+1; // routes are 1-indexed
        ShareARide.LOGGER.log(Level.INFO,"index of route removed = "+iRouteRemoval);
        Point x = XR.getStartingPointOfRoute(iRouteRemoval);
        Point next_x = XR.next(x);
        while(next_x != XR.getTerminatingPointOfRoute(iRouteRemoval)){
            x = next_x;
            next_x = XR.next(x);
            ShareARide.rejectPoints.add(x);
            // Only pickups are classified into the people/goods reject lists;
            // deliveries are added to rejectPoints only.
            if(ShareARide.pickup2Delivery.containsKey(x)){
                if(ShareARide.pickup2DeliveryOfPeople.containsKey(x)){
                    ShareARide.rejectPickupPeoples.add(x);
                }else if(ShareARide.pickup2DeliveryOfGood.containsKey(x)){
                    ShareARide.rejectPickupGoods.add(x);
                }else{
                    ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                    System.exit(-1);
                }
            }
            mgr.performRemoveOnePoint(x);
        }
    }

    /**
     * Removal operator 2 (late-arrival removal): repeatedly removes the
     * request whose time-window slack (departure minus arrival at both the
     * pickup and the delivery) is largest.
     */
    private void late_arrival_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) + lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        int iRemove = 0;
        while(iRemove++ != nRemove){
            // NOTE(review): Double.MIN_VALUE is the smallest POSITIVE double,
            // not the most negative value; a negative deviation would never be
            // selected. Double.NEGATIVE_INFINITY was probably intended.
            double deviationMax = Double.MIN_VALUE;
            Point removedPickup = null;
            Point removedDelivery = null;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                Point x = XR.getStartingPointOfRoute(k);
                for(x = XR.next(x); x != XR.getTerminatingPointOfRoute(k); x = XR.next(x)){
                    Point dX = ShareARide.pickup2Delivery.get(x);
                    if(dX == null){
                        continue; // x is not a pickup
                    }
                    // Earliest arrival / service start / departure at pickup x.
                    double arrivalTime = eat.getEarliestArrivalTime(XR.prev(x))+ ShareARide.serviceDuration.get(XR.prev(x))+ awm.getDistance(XR.prev(x), x);
                    double serviceTime = 1.0*ShareARide.earliestAllowedArrivalTime.get(x);
                    serviceTime = arrivalTime > serviceTime ? arrivalTime : serviceTime;
                    double depatureTime = serviceTime + ShareARide.serviceDuration.get(x);
                    // Same quantities at the delivery dX.
                    double arrivalTimeD = eat.getEarliestArrivalTime(XR.prev(dX))+ ShareARide.serviceDuration.get(XR.prev(dX))+ awm.getDistance(XR.prev(dX), dX);
                    double serviceTimeD = 1.0*ShareARide.earliestAllowedArrivalTime.get(dX);
                    // NOTE(review): BUG? this assigns to serviceTime instead of
                    // serviceTimeD, so depatureTimeD below is computed from the
                    // un-clamped serviceTimeD. Compare with shaw_removal(),
                    // which updates serviceTimeDR1 at the equivalent step.
                    serviceTime = arrivalTimeD > serviceTimeD ? arrivalTimeD : serviceTimeD;
                    double depatureTimeD = serviceTimeD + ShareARide.serviceDuration.get(dX);
                    double deviation = depatureTime - arrivalTime + depatureTimeD - arrivalTimeD;
                    if(deviation > deviationMax){
                        deviationMax = deviation;
                        removedPickup = x;
                        removedDelivery = dX;
                    }
                }
            }
            ShareARide.rejectPoints.add(removedDelivery);
            ShareARide.rejectPoints.add(removedPickup);
            if(ShareARide.pickup2DeliveryOfPeople.containsKey(removedPickup)){
                ShareARide.rejectPickupPeoples.add(removedPickup);
            }else if(ShareARide.pickup2DeliveryOfGood.containsKey(removedPickup)){
                ShareARide.rejectPickupGoods.add(removedPickup);
            }else{
                ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                System.exit(-1);
            }
            mgr.performRemoveTwoPoints(removedPickup, removedDelivery);
        }
    }
    /*
    private void worst_distance_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) + lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        int iRemove = 0;
        while(iRemove++ != nRemove){
            double distanceMax = Double.MIN_VALUE;
            Point removedPickup = null;
            Point removedDelivery = null;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                Point x = XR.getStartingPointOfRoute(k);
                for(x = XR.next(x); x != XR.getTerminatingPointOfRoute(k); x = XR.next(x)){
                    Point dX = ShareARide.pickup2Delivery.get(x);
                    if(dX == null){
                        continue;
                    }
                    double distance = awm.getDistance(x, XR.prev(x)) + awm.getDistance(x, XR.next(x)) + awm.getDistance(dX, XR.prev(dX)) + awm.getDistance(dX, XR.next(dX));
                    if(distance > distanceMax){
                        distanceMax = distance;
                        removedPickup = x;
                        removedDelivery = dX;
                    }
                }
            }
            ShareARide.rejectPoints.add(removedDelivery);
            ShareARide.rejectPoints.add(removedPickup);
            ShareARide.rejectPickup.add(removedPickup);
            ShareARide.rejectDelivery.add(removedDelivery);
            mgr.performRemoveTwoPoints(removedPickup, removedDelivery);
        }
    }
    */

    /**
     * Removal operator 3 (Shaw removal): starting from a random request,
     * repeatedly removes the request most "related" to the last removed one.
     * Relatedness = shaw1st * distance term + shaw2nd * departure-time term
     * + shaw3rd * same-route indicator.
     */
    private void shaw_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) +
                lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        ArrayList<Point> clientPoints = XR.getClientPoints();
        int ipRemove;
        /*
         * select randomly request r1 and its delivery dr1
         */
        Point r1;
        do{
            ipRemove = R.nextInt(clientPoints.size());
            r1 = clientPoints.get(ipRemove);
        }while(ShareARide.rejectPoints.contains(r1));
        Point dr1;
        // The chosen point may be a delivery; normalize so r1 is the pickup.
        boolean isPickup = ShareARide.pickup2Delivery.containsKey(r1);
        if(isPickup){
            dr1 = ShareARide.pickup2Delivery.get(r1);
        }else{
            Point tmp = ShareARide.delivery2Pickup.get(r1);
            dr1 = r1;
            r1 = tmp;
        }
        /*
         * Remove request most related with r1
         */
        int inRemove = 0;
        while(inRemove++ != nRemove){
            Point removedPickup = null;
            Point removedDelivery = null;
            double relatedMin = Double.MAX_VALUE;
            int routeOfR1 = XR.route(r1);
            /*
             * Compute arrival time at request r1 and its delivery dr1
             * (earliest arrival / clamped service start / departure).
             */
            double arrivalTimeR1 = eat.getEarliestArrivalTime(XR.prev(r1))+ ShareARide.serviceDuration.get(XR.prev(r1))+ awm.getDistance(XR.prev(r1), r1);
            double serviceTimeR1 = 1.0*ShareARide.earliestAllowedArrivalTime.get(r1);
            serviceTimeR1 = arrivalTimeR1 > serviceTimeR1 ? arrivalTimeR1 : serviceTimeR1;
            double depatureTimeR1 = serviceTimeR1 + ShareARide.serviceDuration.get(r1);
            double arrivalTimeDR1 = eat.getEarliestArrivalTime(XR.prev(dr1))+ ShareARide.serviceDuration.get(XR.prev(dr1))+ awm.getDistance(XR.prev(dr1), dr1);
            double serviceTimeDR1 = 1.0*ShareARide.earliestAllowedArrivalTime.get(dr1);
            serviceTimeDR1 = arrivalTimeDR1 > serviceTimeDR1 ? arrivalTimeDR1 : serviceTimeDR1;
            double depatureTimeDR1 = serviceTimeDR1 + ShareARide.serviceDuration.get(dr1);
            ShareARide.rejectPoints.add(r1);
            ShareARide.rejectPoints.add(dr1);
            if(ShareARide.pickup2DeliveryOfPeople.containsKey(r1)){
                ShareARide.rejectPickupPeoples.add(r1);
            }else if(ShareARide.pickup2DeliveryOfGood.containsKey(r1)){
                ShareARide.rejectPickupGoods.add(r1);
            }else{
                ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                System.exit(-1);
            }
            mgr.performRemoveTwoPoints(r1, dr1);
            /*
             * find the request that is the most related with r1
             */
            for(int k=1; k<=XR.getNbRoutes(); k++){
                Point x = XR.getStartingPointOfRoute(k);
                for(x = XR.next(x); x != XR.getTerminatingPointOfRoute(k); x = XR.next(x)){
                    Point dX = ShareARide.pickup2Delivery.get(x);
                    if(dX == null) continue; // x is not a pickup
                    /*
                     * Compute arrival time of x and its delivery dX
                     */
                    double arrivalTimeX = eat.getEarliestArrivalTime(XR.prev(x))+ ShareARide.serviceDuration.get(XR.prev(x))+ awm.getDistance(XR.prev(x), x);
                    double serviceTimeX = 1.0*ShareARide.earliestAllowedArrivalTime.get(x);
                    serviceTimeX = arrivalTimeX > serviceTimeX ? arrivalTimeX : serviceTimeX;
                    double depatureTimeX = serviceTimeX + ShareARide.serviceDuration.get(x);
                    double arrivalTimeDX = eat.getEarliestArrivalTime(XR.prev(dX))+ ShareARide.serviceDuration.get(XR.prev(dX))+ awm.getDistance(XR.prev(dX), dX);
                    double serviceTimeDX = 1.0*ShareARide.earliestAllowedArrivalTime.get(dX);
                    serviceTimeDX = arrivalTimeDX > serviceTimeDX ? arrivalTimeDX : serviceTimeDX;
                    double depatureTimeDX = serviceTimeDX + ShareARide.serviceDuration.get(dX);
                    /*
                     * Compute relatedness between r1 and x: weighted distance
                     * + departure-time difference + same-route indicator
                     * (lr1x = 1 when x is on r1's route, -1 otherwise).
                     */
                    int lr1x;
                    if(routeOfR1 == k){
                        lr1x = 1;
                    }else{
                        lr1x = -1;
                    }
                    double related = shaw1st*(awm.getDistance(r1, x) + awm.getDistance(dX, dr1))+ shaw2nd*(Math.abs(depatureTimeR1-depatureTimeX) + Math.abs(depatureTimeDX-depatureTimeDR1))+ shaw3rd*lr1x;
                    if(related < relatedMin){
                        relatedMin = related;
                        removedPickup = x;
                        removedDelivery = dX;
                    }
                }
            }
            // NOTE(review): if no candidate request remains, removedPickup
            // stays null and the next iteration dereferences it — confirm
            // callers guarantee enough routed requests remain.
            r1 = removedPickup;
            dr1 = removedDelivery;
        }
    }

    /**
     * Removal operator 4 (proximity-based): like shaw_removal but relatedness
     * uses only the distance term.
     */
    private void proximity_based_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) + lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        ArrayList<Point> clientPoints = XR.getClientPoints();
        int ipRemove;
        /*
         * select randomly request r1 and its delivery dr1
         */
        Point r1;
        do{
            ipRemove = R.nextInt(clientPoints.size());
            r1 = clientPoints.get(ipRemove);
        }while(ShareARide.rejectPoints.contains(r1));
        Point dr1;
        boolean isPickup = ShareARide.pickup2Delivery.containsKey(r1);
        if(isPickup){
            dr1 = ShareARide.pickup2Delivery.get(r1);
        }else{
            Point tmp = ShareARide.delivery2Pickup.get(r1);
            dr1 = r1;
            r1 = tmp;
        }
        /*
         * Remove request most related with r1
         */
        int inRemove = 0;
        while(inRemove++ != nRemove){
            Point removedPickup = null;
            Point removedDelivery = null;
            double relatedMin = Double.MAX_VALUE;
            ShareARide.rejectPoints.add(r1);
            ShareARide.rejectPoints.add(dr1);
            if(ShareARide.pickup2DeliveryOfPeople.containsKey(r1)){
                ShareARide.rejectPickupPeoples.add(r1);
            }else if(ShareARide.pickup2DeliveryOfGood.containsKey(r1)){
                ShareARide.rejectPickupGoods.add(r1);
            }else{
                ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                System.exit(-1);
            }
            mgr.performRemoveTwoPoints(r1, dr1);
            /*
             * find the request that is the most related with r1
             */
            for(int k=1; k<=XR.getNbRoutes(); k++){
                Point x = XR.getStartingPointOfRoute(k);
                for(x = XR.next(x); x !=
                        XR.getTerminatingPointOfRoute(k); x = XR.next(x)){
                    Point dX = ShareARide.pickup2Delivery.get(x);
                    if(dX == null) continue; // x is not a pickup
                    /*
                     * Compute relatedness between r1 and x: distance term only.
                     */
                    double related = shaw1st*(awm.getDistance(r1, x) + awm.getDistance(dX, dr1));
                    if(related < relatedMin){
                        relatedMin = related;
                        removedPickup = x;
                        removedDelivery = dX;
                    }
                }
            }
            r1 = removedPickup;
            dr1 = removedDelivery;
        }
    }

    /**
     * Removal operator 5 (time-based): like shaw_removal but relatedness uses
     * only the departure-time-difference term.
     */
    private void time_based_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) + lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        ArrayList<Point> clientPoints = XR.getClientPoints();
        int ipRemove;
        /*
         * select randomly request r1 and its delivery dr1
         */
        Point r1;
        do{
            ipRemove = R.nextInt(clientPoints.size());
            r1 = clientPoints.get(ipRemove);
        }while(ShareARide.rejectPoints.contains(r1));
        Point dr1;
        boolean isPickup = ShareARide.pickup2Delivery.containsKey(r1);
        if(isPickup){
            dr1 = ShareARide.pickup2Delivery.get(r1);
        }else{
            Point tmp = ShareARide.delivery2Pickup.get(r1);
            dr1 = r1;
            r1 = tmp;
        }
        /*
         * Remove request most related with r1
         */
        int inRemove = 0;
        while(inRemove++ != nRemove){
            Point removedPickup = null;
            Point removedDelivery = null;
            double relatedMin = Double.MAX_VALUE;
            /*
             * Compute arrival time at request r1 and its delivery dr1
             */
            double arrivalTimeR1 = eat.getEarliestArrivalTime(XR.prev(r1))+ ShareARide.serviceDuration.get(XR.prev(r1))+ awm.getDistance(XR.prev(r1), r1);
            double serviceTimeR1 = 1.0*ShareARide.earliestAllowedArrivalTime.get(r1);
            serviceTimeR1 = arrivalTimeR1 > serviceTimeR1 ? arrivalTimeR1 : serviceTimeR1;
            double depatureTimeR1 = serviceTimeR1 + ShareARide.serviceDuration.get(r1);
            double arrivalTimeDR1 = eat.getEarliestArrivalTime(XR.prev(dr1))+ ShareARide.serviceDuration.get(XR.prev(dr1))+ awm.getDistance(XR.prev(dr1), dr1);
            double serviceTimeDR1 = 1.0*ShareARide.earliestAllowedArrivalTime.get(dr1);
            serviceTimeDR1 = arrivalTimeDR1 > serviceTimeDR1 ? arrivalTimeDR1 : serviceTimeDR1;
            double depatureTimeDR1 = serviceTimeDR1 + ShareARide.serviceDuration.get(dr1);
            ShareARide.rejectPoints.add(r1);
            ShareARide.rejectPoints.add(dr1);
            if(ShareARide.pickup2DeliveryOfPeople.containsKey(r1)){
                ShareARide.rejectPickupPeoples.add(r1);
            }else if(ShareARide.pickup2DeliveryOfGood.containsKey(r1)){
                ShareARide.rejectPickupGoods.add(r1);
            }else{
                ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                System.exit(-1);
            }
            mgr.performRemoveTwoPoints(r1, dr1);
            /*
             * find the request that is the most related with r1
             */
            for(int k=1; k<=XR.getNbRoutes(); k++){
                Point x = XR.getStartingPointOfRoute(k);
                for(x = XR.next(x); x != XR.getTerminatingPointOfRoute(k); x = XR.next(x)){
                    Point dX = ShareARide.pickup2Delivery.get(x);
                    if(dX == null) continue; // x is not a pickup
                    /*
                     * Compute arrival time of x and its delivery dX
                     */
                    double arrivalTimeX = eat.getEarliestArrivalTime(XR.prev(x))+ ShareARide.serviceDuration.get(XR.prev(x))+ awm.getDistance(XR.prev(x), x);
                    double serviceTimeX = 1.0*ShareARide.earliestAllowedArrivalTime.get(x);
                    serviceTimeX = arrivalTimeX > serviceTimeX ? arrivalTimeX : serviceTimeX;
                    double depatureTimeX = serviceTimeX + ShareARide.serviceDuration.get(x);
                    double arrivalTimeDX = eat.getEarliestArrivalTime(XR.prev(dX))+ ShareARide.serviceDuration.get(XR.prev(dX))+ awm.getDistance(XR.prev(dX), dX);
                    double serviceTimeDX = 1.0*ShareARide.earliestAllowedArrivalTime.get(dX);
                    serviceTimeDX = arrivalTimeDX > serviceTimeDX ? arrivalTimeDX : serviceTimeDX;
                    double depatureTimeDX = serviceTimeDX + ShareARide.serviceDuration.get(dX);
                    /*
                     * Compute relatedness between r1 and x:
                     * departure-time-difference term only.
                     */
                    double related = shaw2nd*(Math.abs(depatureTimeR1-depatureTimeX) + Math.abs(depatureTimeDX-depatureTimeDR1));
                    if(related < relatedMin){
                        relatedMin = related;
                        removedPickup = x;
                        removedDelivery = dX;
                    }
                }
            }
            r1 = removedPickup;
            dr1 = removedDelivery;
        }
    }

    /**
     * Removal operator 6 (worst removal): repeatedly removes the request whose
     * removal improves the objective the most (largest
     * evaluateRemoveTwoPoints value).
     */
    private void worst_removal(){
        Random R = new Random();
        int nRemove = R.nextInt(upper_removal-lower_removal+1) + lower_removal;
        ShareARide.LOGGER.log(Level.INFO,"number of request removed = "+nRemove);
        int inRemove = 0;
        while(inRemove++ != nRemove){
            // NOTE(review): Double.MIN_VALUE is the smallest positive double;
            // see the same remark in late_arrival_removal().
            double maxCost = Double.MIN_VALUE;
            Point removedPickup = null;
            Point removedDelivery = null;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                Point x = XR.getStartingPointOfRoute(k);
                for(x = XR.next(x); x != XR.getTerminatingPointOfRoute(k); x = XR.next(x)){
                    Point dX = ShareARide.pickup2Delivery.get(x);
                    if(dX == null){
                        continue; // x is not a pickup
                    }
                    double cost = objective.evaluateRemoveTwoPoints(x, dX);
                    if(cost > maxCost){
                        maxCost = cost;
                        removedPickup = x;
                        removedDelivery = dX;
                    }
                }
            }
            ShareARide.rejectPoints.add(removedDelivery);
            ShareARide.rejectPoints.add(removedPickup);
            if(ShareARide.pickup2DeliveryOfPeople.containsKey(removedPickup)){
                ShareARide.rejectPickupPeoples.add(removedPickup);
            }else if(ShareARide.pickup2DeliveryOfGood.containsKey(removedPickup)){
                ShareARide.rejectPickupGoods.add(removedPickup);
            }else{
                ShareARide.LOGGER.log(Level.INFO,"Exception point removed do not in pickup Good and people");
                System.exit(-1);
            }
            mgr.performRemoveTwoPoints(removedPickup, removedDelivery);
        }
    }

    /**
     * Insertion operator 0 (greedy): re-inserts rejected requests at their
     * cheapest feasible position. People first — pickup immediately followed
     * by its delivery — then goods, whose pickup and delivery positions may
     * differ.
     */
    private void greedy_insertion(){
        ShareARide.LOGGER.log(Level.INFO,"Inserting peoples to route");
        for(int i=0; i<ShareARide.rejectPickupPeoples.size(); i++){
            Point pickup = ShareARide.rejectPickupPeoples.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point best_insertion_pickup = null;
            Point best_insertion_delivery = null;
            double best_objective =
                    Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint: never insert after a people pickup
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    if(S.evaluateAddTwoPoints(pickup, p, delivery, p) == 0){
                        //cost improve
                        double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, p);
                        if( cost < best_objective){
                            best_objective = cost;
                            best_insertion_pickup = p;
                            best_insertion_delivery = p;
                        }
                    }
                }
            }
            if(best_insertion_pickup != null && best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, best_insertion_pickup, delivery, best_insertion_delivery);
                ShareARide.rejectPickupPeoples.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): the bare `i` below is a stray token in the
                // source (it does not compile as-is); most likely a garbled
                // `i--;`, which would compensate for remove(pickup) shifting
                // the list being indexed — confirm against the original file.
                i
            }
        }
        ShareARide.LOGGER.log(Level.INFO,"Inserting goods to route");
        for(int i=0; i<ShareARide.rejectPickupGoods.size(); i++){
            Point pickup = ShareARide.rejectPickupGoods.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point best_insertion_pickup = null;
            Point best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    // Goods delivery may be placed anywhere after the pickup.
                    for(Point q = p; q != XR.getTerminatingPointOfRoute(k); q = XR.next(q)){
                        if(ShareARide.pickup2DeliveryOfPeople.containsKey(q) || S.evaluateAddOnePoint(delivery, q) > 0) continue;
                        if(S.evaluateAddTwoPoints(pickup, p, delivery, q) == 0){
                            double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, q);
                            if(cost < best_objective){
                                best_objective = cost;
                                best_insertion_pickup = p;
                                best_insertion_delivery = q;
                            }
                        }
                    }
                }
            }
            if(best_insertion_pickup != null && best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, best_insertion_pickup, delivery, best_insertion_delivery);
                ShareARide.rejectPickupGoods.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark above.
                i
            }
        }
    }

    /**
     * Insertion operator 1 (greedy + noise): identical to greedy_insertion()
     * except each candidate cost is perturbed by a uniform noise term in
     * [-0.1, 0.1] * MAX_DISTANCE to diversify the chosen positions.
     */
    private void greedy_insertion_noise_function(){
        ShareARide.LOGGER.log(Level.INFO,"Inserting peoples to route");
        for(int i=0; i<ShareARide.rejectPickupPeoples.size(); i++){
            Point pickup = ShareARide.rejectPickupPeoples.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point best_insertion_pickup = null;
            Point best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    //check constraint
                    if(S.evaluateAddTwoPoints(pickup, p, delivery, p) == 0){
                        //cost improve, with random noise in [-1, 1]
                        double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, p);
                        double r = Math.random()*2-1;
                        cost += ShareARide.MAX_DISTANCE*0.1*r;
                        if( cost < best_objective){
                            best_objective = cost;
                            best_insertion_pickup = p;
                            best_insertion_delivery = p;
                        }
                    }
                }
            }
            if(best_insertion_pickup != null && best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, best_insertion_pickup, delivery, best_insertion_delivery);
                ShareARide.rejectPickupPeoples.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark in greedy_insertion().
                i
            }
        }
        ShareARide.LOGGER.log(Level.INFO,"Inserting goods to route");
        for(int i=0; i<ShareARide.rejectPickupGoods.size(); i++){
            Point pickup = ShareARide.rejectPickupGoods.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point best_insertion_pickup = null;
            Point best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    for(Point q = p; q != XR.getTerminatingPointOfRoute(k); q = XR.next(q)){
                        if(ShareARide.pickup2DeliveryOfPeople.containsKey(q) || S.evaluateAddOnePoint(delivery, q) > 0) continue;
                        if(S.evaluateAddTwoPoints(pickup, p, delivery, q) == 0){
                            double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, q);
                            double r = Math.random()*2-1;
                            cost += ShareARide.MAX_DISTANCE*0.1*r;
                            if(cost < best_objective){
                                best_objective = cost;
                                best_insertion_pickup = p;
                                best_insertion_delivery = q;
                            }
                        }
                    }
                }
            }
            if(best_insertion_pickup != null && best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, best_insertion_pickup, delivery, best_insertion_delivery);
                ShareARide.rejectPickupGoods.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark in greedy_insertion().
                i
            }
        }
    }

    /**
     * Insertion operator 2 (second-best): inserts each request at its
     * SECOND-cheapest feasible position instead of the cheapest, as a
     * diversification move.
     *
     * NOTE(review): when a new best cost is found, only the cost is demoted to
     * second-best — the corresponding position is not recorded — so the
     * tracked second-best position can lag the tracked second-best cost.
     * Confirm this is intentional.
     */
    private void second_best_insertion(){
        ShareARide.LOGGER.log(Level.INFO,"Inserting peoples to route");
        for(int i=0; i<ShareARide.rejectPickupPeoples.size(); i++){
            Point pickup = ShareARide.rejectPickupPeoples.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point second_best_insertion_pickup = null;
            Point second_best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            double second_best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    if(S.evaluateAddTwoPoints(pickup, p, delivery, p) == 0){
                        //cost improve
                        double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, p);
                        if( cost <= best_objective){
                            second_best_objective = best_objective;
                            best_objective = cost;
                        }else{
                            if(cost < second_best_objective){
                                second_best_objective = cost;
                                second_best_insertion_pickup = p;
                                second_best_insertion_delivery = p;
                            }
                        }
                    }
                }
            }
            if(second_best_insertion_pickup != null && second_best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, second_best_insertion_pickup, delivery, second_best_insertion_delivery);
                ShareARide.rejectPickupPeoples.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): the bare `i` below is a stray token in the
                // source (it does not compile as-is); most likely a garbled
                // `i--;` compensating for remove(pickup) — confirm against
                // the original file.
                i
            }
        }
        ShareARide.LOGGER.log(Level.INFO,"Inserting goods to route");
        for(int i=0; i<ShareARide.rejectPickupGoods.size(); i++){
            Point pickup = ShareARide.rejectPickupGoods.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point second_best_insertion_pickup = null;
            Point second_best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            double second_best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    for(Point q = p; q != XR.getTerminatingPointOfRoute(k); q = XR.next(q)){
                        if(ShareARide.pickup2DeliveryOfPeople.containsKey(q) || S.evaluateAddOnePoint(delivery, q) > 0) continue;
                        if(S.evaluateAddTwoPoints(pickup, p, delivery, q) == 0){
                            double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, q);
                            if(cost <= best_objective){
                                second_best_objective = best_objective;
                                best_objective = cost;
                            }else{
                                if(cost < second_best_objective){
                                    second_best_objective = cost;
                                    second_best_insertion_pickup = p;
                                    second_best_insertion_delivery = q;
                                }
                            }
                        }
                    }
                }
            }
            if(second_best_insertion_pickup != null && second_best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, second_best_insertion_pickup, delivery, second_best_insertion_delivery);
                ShareARide.rejectPickupGoods.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark above.
                i
            }
        }
    }

    /**
     * Insertion operator 3 (second-best + noise): second_best_insertion()
     * with each candidate cost perturbed by uniform noise in
     * [-0.1, 0.1] * MAX_DISTANCE.
     */
    private void second_best_insertion_noise_function(){
        ShareARide.LOGGER.log(Level.INFO,"Inserting peoples to route");
        for(int i=0; i<ShareARide.rejectPickupPeoples.size(); i++){
            Point pickup = ShareARide.rejectPickupPeoples.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point second_best_insertion_pickup = null;
            Point second_best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            double second_best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    //check constraint
                    if(S.evaluateAddTwoPoints(pickup, p, delivery, p) == 0){
                        //cost improve, with random noise in [-1, 1]
                        double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, p);
                        double r = Math.random()*2-1;
                        cost += ShareARide.MAX_DISTANCE*0.1*r;
                        if( cost <= best_objective){
                            second_best_objective = best_objective;
                            best_objective = cost;
                        }else{
                            if(cost < second_best_objective){
                                second_best_objective = cost;
                                second_best_insertion_pickup = p;
                                second_best_insertion_delivery = p;
                            }
                        }
                    }
                }
            }
            if(second_best_insertion_pickup != null && second_best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, second_best_insertion_pickup, delivery, second_best_insertion_delivery);
                ShareARide.rejectPickupPeoples.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark above.
                i
            }
        }
        ShareARide.LOGGER.log(Level.INFO,"Inserting goods to route");
        for(int i=0; i<ShareARide.rejectPickupGoods.size(); i++){
            Point pickup = ShareARide.rejectPickupGoods.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point second_best_insertion_pickup = null;
            Point second_best_insertion_delivery = null;
            double best_objective = Double.MAX_VALUE;
            double second_best_objective = Double.MAX_VALUE;
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    for(Point q = p; q != XR.getTerminatingPointOfRoute(k); q = XR.next(q)){
                        if(ShareARide.pickup2DeliveryOfPeople.containsKey(q) || S.evaluateAddOnePoint(delivery, q) > 0) continue;
                        if(S.evaluateAddTwoPoints(pickup, p, delivery, q) == 0){
                            double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, q);
                            double r = Math.random()*2-1;
                            cost += ShareARide.MAX_DISTANCE*0.1*r;
                            if(cost <= best_objective){
                                second_best_objective = best_objective;
                                best_objective = cost;
                            }else{
                                if(cost < second_best_objective){
                                    second_best_objective = cost;
                                    second_best_insertion_pickup = p;
                                    second_best_insertion_delivery = q;
                                }
                            }
                        }
                    }
                }
            }
            if(second_best_insertion_pickup != null && second_best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, second_best_insertion_pickup, delivery, second_best_insertion_delivery);
                ShareARide.rejectPickupGoods.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark above.
                i
            }
        }
    }

    /**
     * Insertion operator 4/5 (regret-n): for each rejected request, keeps the
     * n cheapest feasible insertion costs and inserts at the position with the
     * largest regret value (sum of differences between the i-th best and the
     * best cost). Called with n=2 and n=3 from search().
     * NOTE(review): this method continues past the visible end of this chunk.
     */
    private void regret_n_insertion(int n){
        ShareARide.LOGGER.log(Level.INFO,"Inserting peoples to route");
        for(int i=0; i<ShareARide.rejectPickupPeoples.size(); i++){
            Point pickup = ShareARide.rejectPickupPeoples.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point best_insertion_pickup = null;
            Point best_insertion_delivery = null;
            double n_best_objective[] = new double[n];
            double best_regret_value = Double.MIN_VALUE;
            for(int it=0; it<n; it++){
                n_best_objective[it] = Double.MAX_VALUE;
            }
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    if(S.evaluateAddTwoPoints(pickup, p, delivery, p) == 0){
                        //cost improve: insert cost into the sorted n-best array
                        double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, p);
                        for(int it=0; it<n; it++){
                            if(n_best_objective[it] > cost){
                                // NOTE(review): garbled loop header in the
                                // source — almost certainly `it2--)` followed
                                // by `{`; confirm against the original file.
                                for(int it2 = n-1; it2 > it; it2
                                    n_best_objective[it2] = n_best_objective[it2-1];
                                }
                                n_best_objective[it] = cost;
                            }
                        }
                        double regret_value = 0;
                        for(int it=1; it<n; it++){
                            regret_value += Math.abs(n_best_objective[it] - n_best_objective[0]);
                        }
                        if(regret_value > best_regret_value){
                            best_regret_value = regret_value;
                            best_insertion_pickup = p;
                            best_insertion_delivery = p;
                        }
                    }
                }
            }
            if(best_insertion_pickup != null && best_insertion_delivery != null){
                mgr.performAddTwoPoints(pickup, best_insertion_pickup, delivery, best_insertion_delivery);
                ShareARide.rejectPickupPeoples.remove(pickup);
                ShareARide.rejectPoints.remove(pickup);
                ShareARide.rejectPoints.remove(delivery);
                // NOTE(review): stray token — see remark above.
                i
            }
        }
        ShareARide.LOGGER.log(Level.INFO,"Inserting goods to route");
        for(int i=0; i<ShareARide.rejectPickupGoods.size(); i++){
            Point pickup = ShareARide.rejectPickupGoods.get(i);
            Point delivery = ShareARide.pickup2Delivery.get(pickup);
            Point best_insertion_pickup = null;
            Point best_insertion_delivery = null;
            double n_best_objective[] = new double[n];
            double best_regret_value = Double.MIN_VALUE;
            for(int it=0; it<n; it++){
                n_best_objective[it] = Double.MAX_VALUE;
            }
            for(int k=1; k<=XR.getNbRoutes(); k++){
                for(Point p = XR.getStartingPointOfRoute(k); p != XR.getTerminatingPointOfRoute(k); p = XR.next(p)){
                    //check constraint
                    if(ShareARide.pickup2DeliveryOfPeople.containsKey(p) || S.evaluateAddOnePoint(pickup, p) > 0) continue;
                    for(Point q = p; q != XR.getTerminatingPointOfRoute(k); q = XR.next(q)){
                        if(ShareARide.pickup2DeliveryOfPeople.containsKey(q) || S.evaluateAddOnePoint(delivery, q) > 0) continue;
                        if(S.evaluateAddTwoPoints(pickup, p, delivery, q) == 0){
                            double cost = objective.evaluateAddTwoPoints(pickup, p, delivery, q);
                            for(int it=0; it<n; it++){
                                if(n_best_objective[it] > cost){
                                    // NOTE(review): same garbled loop header
                                    // as above — likely `it2--){`.
                                    for(int it2 = n-1; it2 > it; it2
                                        n_best_objective[it2] = n_best_objective[it2-1];
                                    }
                                    n_best_objective[it] = cost;
                                }
                            }
                            double regret_value = 0;
                            for(int it=1; it<n; it++){
                                regret_value +=
Math.abs(n_best_objective[it] - n_best_objective[0]); } if(regret_value > best_regret_value){ best_regret_value = regret_value; best_insertion_pickup = p; best_insertion_delivery = q; } } } } } if(best_insertion_pickup != null && best_insertion_delivery != null){ mgr.performAddTwoPoints(pickup, best_insertion_pickup, delivery, best_insertion_delivery); ShareARide.rejectPickupGoods.remove(pickup); ShareARide.rejectPoints.remove(pickup); ShareARide.rejectPoints.remove(delivery); i } } } //roulette-wheel mechanism private int get_operator(double[] p){ //String message = "probabilities input \n"; int n = p.length; double[] s = new double[n]; s[0] = 0+p[0]; //String messagep = ("p = ["+p[0]+", "); //String messages = ("s = ["+s[0]+", "); for(int i=1; i<n; i++){ //messagep += (p[i]+", "); s[i] = s[i-1]+p[i]; //messages += (s[i]+", "); } //messagep += ("]"); //messages += ("]"); double r = s[n-1]*Math.random(); //String messr = ("radom value = " + r); //message += (messagep +"\n" + messages + "\n" + messr); //ShareARide.LOGGER.log(Level.INFO,message); if(r>=0 && r <= s[0]) return 0; for(int i=1; i<n; i++){ if(r>=s[i-1] && r<=s[i]) return i; } return -1; } }
package at.ac.tuwien.inso.controller.lecturer; import at.ac.tuwien.inso.controller.lecturer.forms.*; import at.ac.tuwien.inso.entity.*; import at.ac.tuwien.inso.service.*; import org.springframework.beans.factory.annotation.*; import org.springframework.stereotype.*; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.mvc.support.*; @Controller @RequestMapping("/lecturer/editCourse") public class LecturerEditCourseController { @Autowired private CourseService courseService; @Autowired private TagService tagService; @ModelAttribute("subject") private Subject getSubject(@RequestParam("courseId") Long courseId) { return courseService.findOne(courseId).getSubject(); } @ModelAttribute("addCourseForm") private AddCourseForm getAddCourseForm(@RequestParam("courseId") Long courseId) { Course course = courseService.findOne(courseId); AddCourseForm form = new AddCourseForm(course); form.setInitialTags(tagService.findAll()); form.setInitialActiveTags(course.getTags()); return form; } @GetMapping private String getAddCoursesPage(@RequestParam("courseId") Long courseId) { return "lecturer/editCourse"; } @PostMapping private String createCourse(@ModelAttribute AddCourseForm form, RedirectAttributes redirectAttributes) { Course course = courseService.saveCourse(form); redirectAttributes.addFlashAttribute("editedCourse", course); return "redirect:/lecturer/courses"; } }
package at.ac.tuwien.kr.alpha.grounder.transformation; import at.ac.tuwien.kr.alpha.common.Predicate; import at.ac.tuwien.kr.alpha.common.atoms.Atom; import at.ac.tuwien.kr.alpha.common.atoms.BasicAtom; import at.ac.tuwien.kr.alpha.common.atoms.Literal; import at.ac.tuwien.kr.alpha.common.depgraph.ComponentGraph; import at.ac.tuwien.kr.alpha.common.depgraph.ComponentGraph.SCComponent; import at.ac.tuwien.kr.alpha.common.depgraph.Node; import at.ac.tuwien.kr.alpha.common.depgraph.StratificationHelper; import at.ac.tuwien.kr.alpha.common.program.AnalyzedProgram; import at.ac.tuwien.kr.alpha.common.program.InternalProgram; import at.ac.tuwien.kr.alpha.common.rule.InternalRule; import at.ac.tuwien.kr.alpha.grounder.IndexedInstanceStorage; import at.ac.tuwien.kr.alpha.grounder.Instance; import at.ac.tuwien.kr.alpha.grounder.RuleGroundingOrder; import at.ac.tuwien.kr.alpha.grounder.RuleGroundingOrders; import at.ac.tuwien.kr.alpha.grounder.Substitution; import at.ac.tuwien.kr.alpha.grounder.WorkingMemory; import at.ac.tuwien.kr.alpha.grounder.instantiation.AssignmentStatus; import at.ac.tuwien.kr.alpha.grounder.instantiation.LiteralInstantiationResult; import at.ac.tuwien.kr.alpha.grounder.instantiation.LiteralInstantiator; import at.ac.tuwien.kr.alpha.grounder.instantiation.WorkingMemoryBasedInstantiationStrategy; import org.apache.commons.collections4.SetUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; // TODO ideally return "PartiallyEvaluatedProgram" here, grounder can use working memories created here rather than re-initialize everything // TODO add solved rules to internal program (in 
// extra list)
/**
 * Evaluates the stratifiable part of a program up-front: facts derivable from
 * the stratified rules are computed to a fixed point and returned as facts of
 * the output program, while the solved rules are removed from it.
 */
public class StratifiedEvaluation extends ProgramTransformation<AnalyzedProgram, InternalProgram> {

	private static final Logger LOGGER = LoggerFactory.getLogger(StratifiedEvaluation.class);

	private StratificationHelper stratificationHelper = new StratificationHelper();
	private WorkingMemory workingMemory = new WorkingMemory();
	private Map<Predicate, HashSet<InternalRule>> predicateDefiningRules;
	// Instances added in the previous evaluation run, used to drive incremental grounding.
	private Map<Predicate, Set<Instance>> modifiedInLastEvaluationRun = new HashMap<>();
	// Facts derived during evaluation; appended to the output program's facts.
	private Set<Atom> additionalFacts = new HashSet<>();
	// Ids of rules fully evaluated here; excluded from the output program.
	private Set<Integer> solvedRuleIds = new HashSet<>();
	private LiteralInstantiator literalInstantiator;

	@Override
	public InternalProgram apply(AnalyzedProgram inputProgram) {
		// Calculate a stratification and initialize working memory.
		ComponentGraph componentGraph = inputProgram.getComponentGraph();
		Map<Integer, List<SCComponent>> strata = stratificationHelper.calculateStratification(componentGraph);
		predicateDefiningRules = inputProgram.getPredicateDefiningRules();
		// set up list of atoms which are known to be true - we expand on this one
		Map<Predicate, Set<Instance>> knownFacts = new LinkedHashMap<>(inputProgram.getFactsByPredicate());
		for (Map.Entry<Predicate, Set<Instance>> entry : knownFacts.entrySet()) {
			workingMemory.initialize(entry.getKey());
			workingMemory.addInstances(entry.getKey(), true, entry.getValue());
		}
		for (InternalRule nonGroundRule : inputProgram.getRulesById().values()) {
			// Create working memories for all predicates occurring in the rule
			for (Predicate predicate : nonGroundRule.getOccurringPredicates()) {
				workingMemory.initialize(predicate);
			}
		}
		workingMemory.reset();
		// Set up literal instantiator.
		literalInstantiator = new LiteralInstantiator(new WorkingMemoryBasedInstantiationStrategy(workingMemory));
		// Evaluate the program part covered by the calculated stratification.
		ComponentEvaluationOrder evaluationOrder = new ComponentEvaluationOrder(strata);
		for (SCComponent currComponent : evaluationOrder) {
			evaluateComponent(currComponent);
		}
		// Build the program resulting from evaluating the stratified part.
		List<Atom> outputFacts = buildOutputFacts(inputProgram.getFacts(), additionalFacts);
		List<InternalRule> outputRules = new ArrayList<>();
		inputProgram.getRulesById().entrySet().stream().filter((entry) -> !solvedRuleIds.contains(entry.getKey()))
				.forEach((entry) -> outputRules.add(entry.getValue()));
		return new InternalProgram(outputRules, outputFacts);
	}

	// extra method is better visible in CPU traces when profiling
	private List<Atom> buildOutputFacts(List<Atom> initialFacts, Set<Atom> newFacts) {
		// LinkedHashSet deduplicates while keeping the original fact order.
		Set<Atom> atomSet = new LinkedHashSet<>(initialFacts);
		atomSet.addAll(newFacts);
		return new ArrayList<>(atomSet);
	}

	// Evaluates one strongly connected component: non-recursive rules once,
	// recursive rules repeatedly until a fixed point is reached.
	private void evaluateComponent(SCComponent comp) {
		LOGGER.debug("Evaluating component {}", comp);
		ComponentEvaluationInfo evaluationInfo = getRulesToEvaluate(comp);
		if (evaluationInfo.isEmpty()) {
			LOGGER.debug("No rules to evaluate for component {}", comp);
			return;
		}
		prepareComponentEvaluation(SetUtils.union(evaluationInfo.nonRecursiveRules, evaluationInfo.recursiveRules));
		// Rules outside of dependency cycles only need to be evaluated once.
		if (!evaluationInfo.nonRecursiveRules.isEmpty()) {
			addFactsToProgram(evaluateRules(evaluationInfo.nonRecursiveRules, true));
		}
		boolean isInitialRun = true;
		if (!evaluationInfo.recursiveRules.isEmpty()) {
			do {
				// Now do the rules that cyclically depend on each other,
				// evaluate these until nothing new can be derived any more.
				addFactsToProgram(evaluateRules(evaluationInfo.recursiveRules, isInitialRun));
				isInitialRun = false;
				// If evaluation of rules doesn't modify the working memory we have a fixed point.
			} while (!workingMemory.modified().isEmpty());
		}
		LOGGER.debug("Evaluation done - reached a fixed point on component {}", comp);
		SetUtils.union(evaluationInfo.nonRecursiveRules, evaluationInfo.recursiveRules)
				.forEach((rule) -> solvedRuleIds.add(rule.getRuleId()));
	}

	// Runs every rule once and collects the instances newly added to the
	// working memory; also records them for the next incremental run.
	private Map<Predicate, List<Instance>> evaluateRules(Set<InternalRule> rules, boolean isInitialRun) {
		Map<Predicate, List<Instance>> addedInstances = new HashMap<>();
		workingMemory.reset();
		LOGGER.debug("Starting component evaluation run...");
		for (InternalRule r : rules) {
			evaluateRule(r, !isInitialRun);
		}
		modifiedInLastEvaluationRun = new HashMap<>();
		// Since we're stratified we never have to backtrack, therefore just collect the added instances.
		for (IndexedInstanceStorage instanceStorage : workingMemory.modified()) {
			// NOTE: We're only dealing with positive instances.
			addedInstances.putIfAbsent(instanceStorage.getPredicate(), new ArrayList<>());
			addedInstances.get(instanceStorage.getPredicate()).addAll(instanceStorage.getRecentlyAddedInstances());
			modifiedInLastEvaluationRun.putIfAbsent(instanceStorage.getPredicate(), new LinkedHashSet<>());
			modifiedInLastEvaluationRun.get(instanceStorage.getPredicate()).addAll(instanceStorage.getRecentlyAddedInstances());
			instanceStorage.markRecentlyAddedInstancesDone();
		}
		return addedInstances;
	}

	/**
	 * To be called at the start of evaluateComponent. Adds all known instances of the predicates occurring in the given set
	 * of rules to the "modifiedInLastEvaluationRun" map in order to "bootstrap" incremental grounding, i.e. making sure
	 * that those instances are taken into account for ground substitutions by evaluateRule.
	 */
	private void prepareComponentEvaluation(Set<InternalRule> rulesToEvaluate) {
		modifiedInLastEvaluationRun = new HashMap<>();
		Predicate tmpPredicate;
		IndexedInstanceStorage tmpInstances;
		for (InternalRule rule : rulesToEvaluate) {
			// register rule head instances
			tmpPredicate = rule.getHeadAtom().getPredicate();
			tmpInstances = workingMemory.get(tmpPredicate, true);
			modifiedInLastEvaluationRun.putIfAbsent(tmpPredicate, new LinkedHashSet<>());
			if (tmpInstances != null) {
				modifiedInLastEvaluationRun.get(tmpPredicate).addAll(tmpInstances.getAllInstances());
			}
			// register positive body instances
			for (Literal lit : rule.getPositiveBody()) {
				tmpPredicate = lit.getPredicate();
				tmpInstances = workingMemory.get(tmpPredicate, true);
				modifiedInLastEvaluationRun.putIfAbsent(tmpPredicate, new LinkedHashSet<>());
				if (tmpInstances != null) {
					modifiedInLastEvaluationRun.get(tmpPredicate).addAll(tmpInstances.getAllInstances());
				}
			}
		}
	}

	// Grounds a rule and fires it for every satisfying substitution found.
	private void evaluateRule(InternalRule rule, boolean checkAllStartingLiterals) {
		LOGGER.debug("Evaluating rule {}", rule);
		List<Substitution> satisfyingSubstitutions = calculateSatisfyingSubstitutionsForRule(rule, checkAllStartingLiterals);
		for (Substitution subst : satisfyingSubstitutions) {
			fireRule(rule, subst);
		}
	}

	private List<Substitution> calculateSatisfyingSubstitutionsForRule(InternalRule rule, boolean checkAllStartingLiterals) {
		LOGGER.debug("Grounding rule {}", rule);
		RuleGroundingOrders groundingOrders = rule.getGroundingOrders();
		List<Substitution> groundSubstitutions = new ArrayList<>(); // the actual full ground substitutions for the rule
		LOGGER.debug("Is fixed rule? {}", rule.getGroundingOrders().fixedInstantiation());
		if (groundingOrders.fixedInstantiation()) {
			// Note: Representation of fixed grounding orders should be refactored in RuleGroundingOrders.
			RuleGroundingOrder fixedGroundingOrder = groundingOrders.getFixedGroundingOrder();
			groundSubstitutions.addAll(calcSubstitutionsWithGroundingOrder(fixedGroundingOrder, Collections.singletonList(new Substitution())));
		} else {
			List<Literal> startingLiterals = groundingOrders.getStartingLiterals();
			List<Substitution> substitutionsForStartingLiteral;
			if (!checkAllStartingLiterals) {
				// If we don't have to check all literals, i.e. we're in the first evaluation run, just use the first one
				Literal lit = startingLiterals.get(0);
				substitutionsForStartingLiteral = calcSubstitutionsWithGroundingOrder(groundingOrders.orderStartingFrom(lit),
						substituteFromRecentlyAddedInstances(lit));
				groundSubstitutions.addAll(substitutionsForStartingLiteral);
			} else {
				for (Literal lit : startingLiterals) {
					substitutionsForStartingLiteral = calcSubstitutionsWithGroundingOrder(groundingOrders.orderStartingFrom(lit),
							substituteFromRecentlyAddedInstances(lit));
					groundSubstitutions.addAll(substitutionsForStartingLiteral);
				}
			}
		}
		return groundSubstitutions;
	}

	/**
	 * Use this to find initial substitutions for a starting literal when grounding a rule.
	 * In order to avoid finding the same ground instantiations of rules again, only look at
	 * <code>modifiedInLastEvaluationRun</code> to obtain instances.
	 *
	 * @param lit the literal to substitute
	 * @return valid ground substitutions for the literal based on the recently added instances (i.e. instances derived in
	 *         the last evaluation run)
	 */
	private List<Substitution> substituteFromRecentlyAddedInstances(Literal lit) {
		List<Substitution> retVal = new ArrayList<>();
		Set<Instance> instances = modifiedInLastEvaluationRun.get(lit.getPredicate());
		if (instances == null) {
			return Collections.emptyList();
		}
		Substitution initialSubstitutionForCurrentInstance;
		for (Instance instance : instances) {
			initialSubstitutionForCurrentInstance = Substitution.unify(lit, instance, new Substitution());
			if (initialSubstitutionForCurrentInstance != null) {
				retVal.add(initialSubstitutionForCurrentInstance);
			}
		}
		return retVal;
	}

	// Breadth-first variant kept for reference; superseded by the stack-based
	// calcSubstitutionsWithGroundingOrder below.
	private List<Substitution> calcSubstitutionsWithGroundingOrderOld(RuleGroundingOrder groundingOrder, List<Substitution> startingSubstitutions) {
		// Iterate through the grounding order starting at index startFromOrderPosition.
		// Whenever instantiation of a Literal with a given substitution causes
		// a result with a type other than CONTINUE, discard that substitution.
		List<Substitution> currentSubstitutions = startingSubstitutions;
		List<Substitution> updatedSubstitutions = new ArrayList<>();
		Literal currentLiteral;
		LiteralInstantiationResult currentLiteralResult;
		int curentOrderPosition = 0;
		while ((currentLiteral = groundingOrder.getLiteralAtOrderPosition(curentOrderPosition)) != null) {
			for (Substitution subst : currentSubstitutions) {
				currentLiteralResult = literalInstantiator.instantiateLiteral(currentLiteral, subst);
				if (currentLiteralResult.getType() == LiteralInstantiationResult.Type.CONTINUE) {
					for (ImmutablePair<Substitution, AssignmentStatus> pair : currentLiteralResult.getSubstitutions()) {
						updatedSubstitutions.add(pair.left);
					}
				}
			}
			if (updatedSubstitutions.isEmpty()) {
				// In this case it doesn't make any sense to advance further in the grounding order.
return Collections.emptyList(); } currentSubstitutions = updatedSubstitutions; updatedSubstitutions = new ArrayList<>(); curentOrderPosition++; } return currentSubstitutions; } private List<Substitution> calcSubstitutionsWithGroundingOrder(RuleGroundingOrder groundingOrder, List<Substitution> startingSubstitutions) { // Iterate through the grounding order starting at index startFromOrderPosition. // Whenever instantiation of a Literal with a given substitution causes // a result with a type other than CONTINUE, discard that substitution. List<Substitution> fullSubstitutions = new ArrayList<>(); Stack<ArrayList<Substitution>> substitutionStack = new Stack<>(); // For speed, we really want ArrayLists on the stack. if (startingSubstitutions instanceof ArrayList) { substitutionStack.push((ArrayList<Substitution>) startingSubstitutions); } else { substitutionStack.push(new ArrayList<>(startingSubstitutions)); // Copy startingSubstitutions into ArrayList. Note: mostly happens for empty or singleton lists. } int currentOrderPosition = 0; while (!substitutionStack.isEmpty()) { List<Substitution> currentSubstitutions = substitutionStack.peek(); // If no more substitutions remain at current position, all have been processed, continue on next lower level. if (currentSubstitutions.isEmpty()) { substitutionStack.pop(); currentOrderPosition continue; } // In case the full grounding order has been worked on, all current substitutions are full substitutions, add them to result. Literal currentLiteral = groundingOrder.getLiteralAtOrderPosition(currentOrderPosition); if (currentLiteral == null) { fullSubstitutions.addAll(currentSubstitutions); currentSubstitutions.clear(); // Continue on next lower level. substitutionStack.pop(); currentOrderPosition continue; } // Take one substitution from the top-list of the stack and try extending it. 
			Substitution currentSubstitution = currentSubstitutions.remove(currentSubstitutions.size() - 1); // Work on last element (removing last element is O(1) for ArrayList).
			LiteralInstantiationResult currentLiteralResult = literalInstantiator.instantiateLiteral(currentLiteral, currentSubstitution);
			if (currentLiteralResult.getType() == LiteralInstantiationResult.Type.CONTINUE) {
				// The currentSubstitution could be extended, push the extensions on the stack and continue working on them.
				ArrayList<Substitution> furtheredSubstitutions = new ArrayList<>();
				for (ImmutablePair<Substitution, AssignmentStatus> resultSubstitution : currentLiteralResult.getSubstitutions()) {
					furtheredSubstitutions.add(resultSubstitution.left);
				}
				substitutionStack.push(furtheredSubstitutions);
				// Continue work on the higher level.
				currentOrderPosition++;
			}
		}
		return fullSubstitutions;
	}

	// Adds the (necessarily ground) head atom produced by the substitution to the working memory.
	private void fireRule(InternalRule rule, Substitution substitution) {
		Atom newAtom = rule.getHeadAtom().substitute(substitution);
		if (!newAtom.isGround()) {
			throw new IllegalStateException("Trying to fire rule " + rule.toString() + " with incompatible substitution " + substitution.toString());
		}
		LOGGER.debug("Firing rule - got head atom: {}", newAtom);
		workingMemory.addInstance(newAtom, true);
	}

	// Splits the rules defining the component's head predicates into recursive
	// (body refers back into the component) and non-recursive ones.
	// NOTE(review): classification is per body literal, so a rule with both an
	// in-component and an out-of-component positive literal is added to BOTH
	// sets — verify this is intended (the caller unions the sets in places).
	private ComponentEvaluationInfo getRulesToEvaluate(SCComponent comp) {
		Set<InternalRule> nonRecursiveRules = new HashSet<>();
		Set<InternalRule> recursiveRules = new HashSet<>();
		HashSet<InternalRule> definingRules;
		Set<Predicate> headPredicates = new HashSet<>();
		for (Node node : comp.getNodes()) {
			headPredicates.add(node.getPredicate());
		}
		for (Predicate headPredicate : headPredicates) {
			definingRules = predicateDefiningRules.get(headPredicate);
			if (definingRules == null) {
				// predicate only occurs in facts
				continue;
			}
			for (InternalRule rule : definingRules) {
				for (Literal lit : rule.getPositiveBody()) {
					if (headPredicates.contains(lit.getPredicate())) {
						// rule body contains a predicate that is defined in the same component,
						// rule is therefore part of a dependency chain within this component and must be evaluated repeatedly
						recursiveRules.add(rule);
					} else {
						nonRecursiveRules.add(rule);
					}
				}
			}
		}
		return new ComponentEvaluationInfo(nonRecursiveRules, recursiveRules);
	}

	// Converts newly derived instances into BasicAtom facts for the output program.
	private void addFactsToProgram(Map<Predicate, List<Instance>> instances) {
		for (Entry<Predicate, List<Instance>> entry : instances.entrySet()) {
			for (Instance inst : entry.getValue()) {
				additionalFacts.add(new BasicAtom(entry.getKey(), inst.terms));
			}
		}
	}

	// Iterates components stratum by stratum, in the map's iteration order.
	private class ComponentEvaluationOrder implements Iterable<SCComponent> {

		private Iterator<Entry<Integer, List<SCComponent>>> strataIterator;
		private Iterator<SCComponent> componentIterator;

		private ComponentEvaluationOrder(Map<Integer, List<SCComponent>> stratification) {
			strataIterator = stratification.entrySet().iterator();
			startNextStratum();
		}

		private boolean startNextStratum() {
			if (!strataIterator.hasNext()) {
				return false;
			}
			componentIterator = strataIterator.next().getValue().iterator();
			return true;
		}

		@Override
		public Iterator<SCComponent> iterator() {
			return new Iterator<SCComponent>() {

				@Override
				public boolean hasNext() {
					if (componentIterator == null) {
						// can happen when there are actually no components, as is the case for empty programs or programs just consisting of
						// facts
						return false;
					}
					if (componentIterator.hasNext()) {
						return true;
					} else {
						if (!startNextStratum()) {
							return false;
						} else {
							return hasNext();
						}
					}
				}

				@Override
				public SCComponent next() {
					return componentIterator.next();
				}
			};
		}
	}

	// Immutable pair of rule sets for one component; see getRulesToEvaluate.
	private class ComponentEvaluationInfo {
		final Set<InternalRule> nonRecursiveRules;
		final Set<InternalRule> recursiveRules;

		ComponentEvaluationInfo(Set<InternalRule> nonRecursive, Set<InternalRule> recursive) {
			nonRecursiveRules = Collections.unmodifiableSet(nonRecursive);
			recursiveRules = Collections.unmodifiableSet(recursive);
		}

		boolean isEmpty() {
			return nonRecursiveRules.isEmpty() && recursiveRules.isEmpty();
		}
	}
}
package com.foundationdb.server.store.format.tuple;

import com.foundationdb.ais.model.Group;
import com.foundationdb.ais.model.Index;
import com.foundationdb.ais.model.HasStorage;
import com.foundationdb.ais.model.StorageDescription;
import com.foundationdb.ais.model.validation.AISValidationFailure;
import com.foundationdb.ais.model.validation.AISValidationOutput;
import com.foundationdb.ais.protobuf.AISProtobuf.Storage;
import com.foundationdb.ais.protobuf.FDBProtobuf.TupleUsage;
import com.foundationdb.ais.protobuf.FDBProtobuf;
import com.foundationdb.server.error.StorageDescriptionInvalidException;
import com.foundationdb.server.rowdata.RowData;
import com.foundationdb.server.rowdata.RowDef;
import com.foundationdb.server.store.FDBStore;
import com.foundationdb.server.store.FDBStoreData;
import com.foundationdb.server.store.format.FDBStorageDescription;
import com.foundationdb.tuple.ByteArrayUtil;
import com.foundationdb.tuple.Tuple;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.InvalidProtocolBufferException;
import com.persistit.Key;

import java.util.List;

/**
 * Storage description that encodes keys (and, for KEY_AND_ROW usage, whole
 * rows) with the FoundationDB Tuple layer instead of raw persistit key bytes.
 * When {@code usage} is null, all operations fall through to the superclass.
 */
public class TupleStorageDescription extends FDBStorageDescription {
    // How tuples are used: keys only, or keys and rows; null means "not tuple-encoded".
    private TupleUsage usage;

    public TupleStorageDescription(HasStorage forObject) {
        super(forObject);
    }

    // Copy constructor used by cloneForObject.
    public TupleStorageDescription(HasStorage forObject, TupleStorageDescription other) {
        super(forObject, other);
        this.usage = other.usage;
    }

    @Override
    public StorageDescription cloneForObject(HasStorage forObject) {
        return new TupleStorageDescription(forObject, this);
    }

    public TupleUsage getUsage() {
        return usage;
    }
    public void setUsage(TupleUsage usage) {
        this.usage = usage;
    }

    @Override
    public void writeProtobuf(Storage.Builder builder) {
        super.writeProtobuf(builder);
        if (usage != null) {
            builder.setExtension(FDBProtobuf.tupleUsage, usage);
        }
        writeUnknownFields(builder);
    }

    /**
     * Checks that tuple usage is applicable: KEY_AND_ROW requires a single-table
     * Group, and all column types must be representable in a Tuple.
     */
    @Override
    public void validate(AISValidationOutput output) {
        super.validate(output);
        if (usage == null) {
            return;
        }
        if (usage == TupleUsage.KEY_AND_ROW) {
            if (!(object instanceof Group)) {
                output.reportFailure(new AISValidationFailure(new StorageDescriptionInvalidException(object, "is not a Group and has no row")));
                return;
            }
            if (!((Group)object).getRoot().getChildJoins().isEmpty()) {
                output.reportFailure(new AISValidationFailure(new StorageDescriptionInvalidException(object, "has more than one table")));
                return;
            }
        }
        List<String> illegal;
        if (object instanceof Group) {
            illegal = TupleRowDataConverter.checkTypes((Group)object, usage);
        }
        else if (object instanceof Index) {
            illegal = TupleRowDataConverter.checkTypes((Index)object, usage);
        }
        else {
            output.reportFailure(new AISValidationFailure(new StorageDescriptionInvalidException(object, "is not a Group or Index and cannot use Tuples")));
            return;
        }
        if (!illegal.isEmpty()) {
            output.reportFailure(new AISValidationFailure(new StorageDescriptionInvalidException(object, "has some types that cannot be stored in a Tuple: " + illegal)));
        }
    }

    /**
     * Re-encodes a persistit Key as packed Tuple bytes.
     * BEFORE appends a zero byte, AFTER takes the string-increment — presumably
     * to produce exclusive range bounds sorting just inside/after the prefix
     * (TODO confirm against FDB range-read semantics).
     */
    @Override
    public byte[] getKeyBytes(Key key, Key.EdgeValue edge) {
        if (usage != null) {
            Object[] keys = new Object[key.getDepth()];
            key.reset();
            for (int i = 0; i < keys.length; i++) {
                keys[i] = key.decode();
            }
            byte[] bytes = Tuple.from(keys).pack();
            if (edge == Key.BEFORE) {
                return ByteArrayUtil.join(bytes, new byte[1]);
            }
            else if (edge == Key.AFTER) {
                return ByteArrayUtil.strinc(bytes);
            }
            else {
                return bytes;
            }
        }
        else {
            return super.getKeyBytes(key, edge);
        }
    }

    /** Inverse of getKeyBytes: rebuilds a persistit Key from a decoded Tuple. */
    @Override
    public void getTupleKey(Tuple t, Key key) {
        if (usage != null) {
            key.clear();
            for (Object seg : t) {
                key.append(seg);
            }
        }
        else {
            super.getTupleKey(t, key);
        }
    }

    /** Serializes a row as a packed Tuple when rows are tuple-encoded. */
    @Override
    public void packRowData(FDBStore store, FDBStoreData storeData, RowData rowData) {
        if (usage == TupleUsage.KEY_AND_ROW) {
            RowDef rowDef = ((Group)object).getRoot().rowDef();
            assert (rowDef.getRowDefId() == rowData.getRowDefId()) : rowData;
            Tuple t = TupleRowDataConverter.tupleFromRowData(rowDef, rowData);
            storeData.value = t.pack();
        }
        else {
            super.packRowData(store, storeData, rowData);
        }
    }

    /** Deserializes a packed-Tuple row back into RowData. */
    @Override
    public void expandRowData(FDBStore store, FDBStoreData storeData, RowData rowData) {
        if (usage == TupleUsage.KEY_AND_ROW) {
            Tuple t = Tuple.fromBytes(storeData.value);
            RowDef rowDef = ((Group)object).getRoot().rowDef();
            TupleRowDataConverter.tupleToRowData(t, rowDef, rowData);
        }
        else {
            super.expandRowData(store, storeData, rowData);
        }
    }
}
package com.github.bingoohuang.springrest.boot.interceptor;

import com.github.bingoohuang.springrest.boot.annotations.RestfulSign;
import com.github.bingoohuang.utils.codec.Base64;
import com.github.bingoohuang.utils.net.Http;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
import com.google.common.io.CharStreams;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.MultiValueMap;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.security.MessageDigest;
import java.util.*;

/**
 * Spring MVC interceptor that verifies an HMAC-SHA256 signature ("hisv" header)
 * computed over the request method, URL, headers and parameters, and logs the
 * request/response pair under a per-request correlation id ("hici" header).
 *
 * <p>Signing can be disabled per controller or per handler method with
 * {@link RestfulSign}. The canonical string built by
 * {@link #createOriginalStringForSign(HttpServletRequest)} must stay
 * byte-compatible with the client-side signer, so its quirks are documented
 * but deliberately left unchanged.
 */
public class SignInterceptor extends HandlerInterceptorAdapter {

    // NOTE(review): hard-coded shared secret; should come from external configuration.
    public static final String CLIENT_SECURITY = "d51fd93e-f6c9-4eae-ae7a-9b37af1a60cc";

    /**
     * Verifies the request signature before the handler runs.
     *
     * @return true to continue the handler chain, false when the signature is
     *         missing or invalid (a 416 error is written to the response).
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        // NOTE(review): non-HandlerMethod requests (e.g. static resources) are rejected
        // outright. Original behavior kept; confirm this is intended rather than `return true`.
        if (!(handler instanceof HandlerMethod)) return false;

        HandlerMethod method = (HandlerMethod) handler;
        Class<?> beanType = method.getBeanType();
        boolean ignoreSign = ignoreSign(beanType, method);
        Logger logger = LoggerFactory.getLogger("rest." + beanType.getName());
        // Nothing to verify and nothing to log: bail out early.
        if (ignoreSign && !logger.isInfoEnabled()) return true;

        // Correlation id: taken from the client, or generated server-side.
        String hici = request.getHeader("hici");
        if (StringUtils.isEmpty(hici)) hici = UUID.randomUUID().toString();
        request.setAttribute("_log_hici", hici);
        request.setAttribute("_log_start", System.currentTimeMillis());

        String originalStr = createOriginalStringForSign(request);
        logger.info("spring rest server {} request {}", hici, originalStr.replace("\n", "\\n"));
        if (ignoreSign) return true;

        String hisv = request.getHeader("hisv");
        if (Strings.isNullOrEmpty(hisv)) {
            logger.info("spring rest server {} signature missed", hici);
            Http.error(response, 416, "signature missed");
            return false;
        }

        String sign = hmacSHA256(originalStr, CLIENT_SECURITY);
        // Constant-time comparison: avoids leaking matching-prefix length via timing.
        boolean signOk = MessageDigest.isEqual(sign.getBytes("UTF-8"), hisv.getBytes("UTF-8"));
        logger.info("spring rest server {} sign result {}", hici, signOk);
        if (!signOk) Http.error(response, 416, "invalid signature");
        return signOk;
    }

    /**
     * Logs the response (status, headers, body, elapsed time) captured by the
     * logging filter in the "_log_baos"/"_log_sw" request attributes.
     */
    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
        if (!(handler instanceof HandlerMethod)) return;

        HandlerMethod method = (HandlerMethod) handler;
        Class<?> beanType = method.getBeanType();
        Logger logger = LoggerFactory.getLogger("rest." + beanType.getName());
        if (!logger.isInfoEnabled()) return;

        StringBuilder headerSb = new StringBuilder();
        Collection<String> headerNames = response.getHeaderNames();
        Joiner joiner = Joiner.on(',');
        for (String headerName : headerNames) {
            // Fix: the joined value was previously computed and discarded, and the
            // collection's toString() appended instead.
            headerSb.append(headerName).append('=')
                    .append(joiner.join(response.getHeaders(headerName))).append('&');
        }
        String contentType = response.getContentType();
        headerSb.append("Content-Type=").append(contentType);

        // These attributes are set by a companion logging filter; guard against
        // the filter not being in the chain (previously NPE'd here).
        ByteArrayOutputStream baos = (ByteArrayOutputStream) request.getAttribute("_log_baos");
        StringWriter sw = (StringWriter) request.getAttribute("_log_sw");

        String body = null;
        String lowerContentType = StringUtils.lowerCase(contentType);
        if (containsAnyOrNull(lowerContentType, "json", "xml", "text")) {
            byte[] bytes = baos == null ? ArrayUtils.EMPTY_BYTE_ARRAY : baos.toByteArray();
            if (bytes.length > 0) {
                body = new String(bytes, "UTF-8");
            } else if (sw != null) {
                body = sw.toString();
            }
        }
        // Fix: null check must precede contains(); original dereferenced first.
        if (body == null || body.contains("<html>")) body = " ignored";
        // Fix: escape newlines as a literal backslash-n. The old regex replacement
        // "\\n" collapsed newlines to a bare 'n' (backslash is an escape in
        // Matcher replacement strings).
        body = body.replace("\r\n", "\\n").replace("\n", "\\n");

        String hici = (String) request.getAttribute("_log_hici");
        Long start = (Long) request.getAttribute("_log_start");
        // -1 signals that preHandle never recorded a start time.
        long costMillis = start == null ? -1 : System.currentTimeMillis() - start;
        logger.info("spring rest server {} response cost {} millis, status code {}, headers: {}, body: {}",
                hici, costMillis, response.getStatus(), headerSb, body);
    }

    /**
     * @return true when contentType is null or contains any of the given tokens.
     */
    private boolean containsAnyOrNull(String contentType, String... any) {
        if (contentType == null) return true;
        for (String item : any) {
            if (contentType.contains(item)) return true;
        }
        return false;
    }

    /**
     * Builds the canonical string the signature is computed over:
     * method + URL, then headers, then request parameters.
     * Must remain byte-compatible with the client-side signer.
     */
    private String createOriginalStringForSign(HttpServletRequest request) {
        StringBuilder signStr = new StringBuilder();
        appendMethodAndUrl(request, signStr);
        appendHeaders(request, signStr);
        appendRequestParams(request, signStr);
        return signStr.toString();
    }

    /**
     * Looks up {@link RestfulSign} on the method first, then the controller class.
     * NOTE(review): with no annotation anywhere, signing is IGNORED by default —
     * confirm this opt-in behavior is intended.
     */
    private boolean ignoreSign(Class<?> beanType, HandlerMethod method) {
        RestfulSign restfulSign = method.getMethod().getAnnotation(RestfulSign.class);
        if (restfulSign != null) return restfulSign.ignore();

        restfulSign = beanType.getAnnotation(RestfulSign.class);
        if (restfulSign != null) return restfulSign.ignore();

        return true;
    }

    /**
     * HMAC-SHA256 of the UTF-8 bytes of data, keyed with the UTF-8 bytes of key,
     * returned as standard Base64.
     */
    public static String hmacSHA256(String data, String key) {
        try {
            SecretKeySpec secretKey = new SecretKeySpec(key.getBytes("UTF-8"), "HmacSHA256");
            Mac mac = Mac.getInstance("HmacSHA256");
            mac.init(secretKey);
            byte[] hmacData = mac.doFinal(data.getBytes("UTF-8"));
            return Base64.base64(hmacData, Base64.Format.Standard);
        } catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Appends sorted request parameters as name$value$...; body-only parameters
     * only (those present in the query string are covered by the URL portion).
     * JSON bodies are folded in under the synthetic "_json" key, multipart files
     * under their field name as MD5 digests.
     */
    private void appendRequestParams(HttpServletRequest request, StringBuilder signStr) {
        // TreeMap: parameter order must be deterministic for a stable signature.
        Map<String, String[]> parameterMap = Maps.newTreeMap();
        parameterMap.putAll(request.getParameterMap());
        String json = getJson(request);
        if (!Strings.isNullOrEmpty(json)) parameterMap.put("_json", new String[]{json});
        fileUpload(request, parameterMap);

        String queryString = request.getQueryString();
        for (Map.Entry<String, String[]> entry : parameterMap.entrySet()) {
            String parameterName = entry.getKey();
            if (isQueryParameter(queryString, parameterName)) continue;
            signStr.append(parameterName).append('$');
            for (String value : entry.getValue()) {
                signStr.append(value).append('$');
            }
        }
    }

    /**
     * Replaces each multipart field's value with the '$'-joined MD5 digests of
     * its uploaded files, so file content participates in the signature.
     */
    private void fileUpload(HttpServletRequest request, Map<String, String[]> parameterMap) {
        if (!(request instanceof MultipartHttpServletRequest)) return;

        MultipartHttpServletRequest mreq = (MultipartHttpServletRequest) request;
        MultiValueMap<String, MultipartFile> multiFileMap = mreq.getMultiFileMap();
        for (Map.Entry<String, List<MultipartFile>> entry : multiFileMap.entrySet()) {
            String name = entry.getKey();
            StringBuilder sb = new StringBuilder();
            List<MultipartFile> value = entry.getValue();
            for (MultipartFile file : value) {
                sb.append(md5(getBytes(file))).append('$');
            }
            if (sb.length() > 0) sb.delete(sb.length() - 1, sb.length());
            parameterMap.put(name, new String[]{sb.toString()});
        }
    }

    /** Reads a multipart file's bytes, wrapping IOException as unchecked. */
    private byte[] getBytes(MultipartFile value) {
        try {
            return value.getBytes();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** MD5 digest of the bytes, returned as standard Base64. */
    public static String md5(byte[] bytes) {
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] digest = md.digest(bytes);
            return Base64.base64(digest, Base64.Format.Standard);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Reads the request body for POSTs with an application/json content type.
     *
     * @return the body text, or null when not a JSON POST.
     */
    public String getJson(HttpServletRequest request) {
        if (!"POST".equalsIgnoreCase(request.getMethod())) return null;

        String contentType = request.getHeader("content-type");
        if (contentType == null) return null;
        if (!contentType.contains("application/json")) return null;

        try {
            BufferedReader reader = request.getReader();
            return CharStreams.toString(reader);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Heuristically decides whether parameterName appears as a query-string key.
     * NOTE(review): only the FIRST occurrence of the name is examined, so a name
     * that first appears as a substring (e.g. "afoo=1&foo=2" for "foo") is missed.
     * Left unchanged on purpose: clients compute the signature with the same rule,
     * so "fixing" only the server would break signature verification.
     */
    private boolean isQueryParameter(String queryString, String parameterName) {
        if (Strings.isNullOrEmpty(queryString)) return false;

        int index = queryString.indexOf(parameterName);
        if (index < 0) return false;
        if (index > 0 && queryString.charAt(index - 1) != '&') return false;

        int offset = index + parameterName.length();
        if (offset >= queryString.length()) return true;
        return queryString.charAt(offset) == '=';
    }

    // Headers excluded from the signature (volatile or transport-level).
    private static String[] filtered = new String[]{
            "hisv", "accept-encoding", "user-agent", "host",
            "connection", "content-length", "content-type"
    };

    /** Appends each non-filtered header as name$value$... in request order. */
    private void appendHeaders(HttpServletRequest request, StringBuilder signStr) {
        Enumeration<String> headerNames = request.getHeaderNames();
        while (headerNames.hasMoreElements()) {
            String headerName = headerNames.nextElement();
            if (ArrayUtils.contains(filtered, headerName)) continue;

            Enumeration<String> headers = request.getHeaders(headerName);
            signStr.append(headerName).append('$');
            joinEnumeration(signStr, headers);
        }
    }

    /** Appends each enumeration element followed by '$'. */
    private void joinEnumeration(StringBuilder signStr, Enumeration<String> headers) {
        while (headers.hasMoreElements()) {
            signStr.append(headers.nextElement()).append('$');
        }
    }

    /** Appends METHOD$fullUrl$ (URL including the query string, when present). */
    private void appendMethodAndUrl(HttpServletRequest request, StringBuilder signStr) {
        signStr.append(request.getMethod()).append('$');
        StringBuilder fullUrl = new StringBuilder(request.getRequestURL());
        String queryString = request.getQueryString();
        if (!Strings.isNullOrEmpty(queryString)) fullUrl.append('?').append(queryString);
        signStr.append(fullUrl).append('$');
    }
}
package com.github.onsdigital.babbage.publishing.model;

import com.github.onsdigital.babbage.configuration.Configuration;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;

/**
 * Notification payload describing a publish event: the collection being
 * published, the URIs it affects, and the scheduled publish date (kept as the
 * raw string it arrived with, parsed on demand).
 */
public class PublishNotification {

    private String key;
    private String collectionId;
    private List<String> uriList;
    private String publishDate;

    /** @return the notification key. */
    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    /** @return the id of the collection being published. */
    public String getCollectionId() {
        return collectionId;
    }

    public void setCollectionId(String collectionId) {
        this.collectionId = collectionId;
    }

    /** @return the URIs affected by this publish. */
    public List<String> getUriList() {
        return uriList;
    }

    public void setUriList(List<String> uriList) {
        this.uriList = uriList;
    }

    /** @return the raw publish date string as received. */
    public String getPublishDate() {
        return publishDate;
    }

    public void setPublishDate(String publishDate) {
        this.publishDate = publishDate;
    }

    /**
     * Parses {@link #getPublishDate()} using the configured content date pattern.
     *
     * @return the parsed date, or null when the date is absent or unparseable.
     */
    public Date getDate() {
        if (publishDate == null) {
            return null;
        }
        // SimpleDateFormat is not thread-safe, so a fresh instance is built per call.
        SimpleDateFormat formatter =
                new SimpleDateFormat(Configuration.CONTENT_SERVICE.getDefaultContentDatePattern());
        try {
            return formatter.parse(publishDate);
        } catch (ParseException e) {
            System.err.println("Warning!!!!!!!! Publish date for publish notification is invalid, can not parse to date");
            e.printStackTrace();
            return null;
        }
    }
}
package edu.neu.ccs.pyramid.multilabel_classification.crf;

import edu.neu.ccs.pyramid.dataset.MultiLabel;
import edu.neu.ccs.pyramid.dataset.MultiLabelClfDataSet;
import edu.neu.ccs.pyramid.multilabel_classification.MLScorer;
import edu.neu.ccs.pyramid.optimization.LBFGS;
import edu.neu.ccs.pyramid.util.MathUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.List;
import java.util.stream.IntStream;

/**
 * EM-style optimizer for a CMLCRF that minimizes the negative log of the
 * expected score (a "log-risk" objective): for each data point the expectation
 * of a task-specific score (from MLScorer) is taken over the CRF's label
 * combination distribution, plus an L2-style penalty supplied by KLLoss.
 *
 * Call iterate() repeatedly; each iteration runs:
 *   updateTargets() -> updateModel() -> updateProbabilities()
 * in that exact order (targets are built from the CURRENT probabilities, the
 * model is refit to the targets, and the cached probabilities are then
 * refreshed from the new model). The order must not be changed.
 */
public class LogRiskOptimizer {
    private static final Logger logger = LogManager.getLogger();
    private MultiLabelClfDataSet dataSet;
    private CMLCRF crf;
    // Per-iteration soft assignment over combinations; size = [num data][num combination]
    private double[][] targets;
    // todo
    // should be the same as crf combination
    private List<MultiLabel> combinations;
    // Precomputed (optionally transformed) scores; size = [num data][num combination]
    private double[][] scores;
    // Gaussian prior variance passed to KLLoss (larger = weaker regularization).
    private double variance;
    // Cached CRF predictions, refreshed after each model update; size = [num data][num combination]
    private double[][] probabilities;
    private MLScorer mlScorer;
    // NOTE(review): the three fields below are never assigned from the constructor
    // parameters of the same names; the constructor uses its parameters directly
    // and these fields keep their defaults. Harmless today (unused elsewhere),
    // but worth confirming.
    private boolean expScore = false;
    private boolean multiplyScore = false;
    private double scoreMultiplier = 1;

    /**
     * Precomputes the score table for every (data point, combination) pair and
     * initializes the probability cache from the given CRF.
     *
     * @param dataSet         training data
     * @param mlScorer        scorer comparing the true label set with each combination
     * @param crf             model being optimized; supplies the support combinations
     * @param variance        prior variance forwarded to KLLoss
     * @param expScore        if true, exponentiate each raw score
     * @param multiplyScore   if true, scale each score by scoreMultiplier
     * @param scoreMultiplier scale factor applied when multiplyScore is true
     */
    public LogRiskOptimizer(MultiLabelClfDataSet dataSet, MLScorer mlScorer, CMLCRF crf, double variance,
                            boolean expScore, boolean multiplyScore, double scoreMultiplier) {
        this.dataSet = dataSet;
        this.variance = variance;
        this.crf = crf;
        this.mlScorer = mlScorer;
        this.combinations = crf.getSupportCombinations();
        this.scores = new double[dataSet.getNumDataPoints()][combinations.size()];
        for (int i=0;i<dataSet.getNumDataPoints();i++){
            for (int j=0;j<combinations.size();j++){
                MultiLabel truth = dataSet.getMultiLabels()[i];
                MultiLabel combination = combinations.get(j);
                double f = mlScorer.score(dataSet.getNumClasses(),truth,combination);
                scores[i][j] = f;
                // todo the order matters here
                // exp is applied before the multiplier, so the two transforms do not commute.
                if (expScore){
                    scores[i][j] = Math.exp(scores[i][j]);
                }
                if (multiplyScore){
                    scores[i][j] = scores[i][j]*scoreMultiplier;
                }
            }
        }
        this.targets = new double[dataSet.getNumDataPoints()][combinations.size()];
        this.probabilities = new double[dataSet.getNumDataPoints()][combinations.size()];
        // Seed the probability cache from the initial model state.
        this.updateProbabilities();
        if (logger.isDebugEnabled()){
            logger.debug("finish constructor");
        }
    }

    // Refresh the cached combination distribution for one data point from the current CRF.
    private void updateProbabilities(int dataPointIndex){
        probabilities[dataPointIndex] = crf.predictCombinationProbs(dataSet.getRow(dataPointIndex));
    }

    // Refresh the probability cache for all data points (parallel over data points).
    private void updateProbabilities(){
        if (logger.isDebugEnabled()){
            logger.debug("start updateProbabilities()");
        }
        IntStream.range(0, dataSet.getNumDataPoints()).parallel().forEach(this::updateProbabilities);
        if (logger.isDebugEnabled()){
            logger.debug("finish updateProbabilities()");
        }
    }

    // E-step for one data point: target_j = p_j * s_j / sum_k(p_k * s_k),
    // i.e. the current distribution reweighted by the score and renormalized.
    // If every p_j*s_j is 0 the denominator is 0 and targets become NaN;
    // presumably scores are kept strictly positive upstream — not verifiable here.
    private void updateTargets(int dataPointIndex){
        double[] probs = probabilities[dataPointIndex];
        double[] product = new double[probs.length];
        double[] s = this.scores[dataPointIndex];
        for (int j=0;j<probs.length;j++){
            product[j] = probs[j]*s[j];
        }
        double denominator = MathUtil.arraySum(product);
        for (int j=0;j<probs.length;j++){
            targets[dataPointIndex][j] = product[j]/denominator;
        }
    }

    /**
     * One optimization iteration. The three phases must run in this order:
     * targets from current probabilities, model fit to targets, then the
     * probability cache refreshed from the updated model.
     */
    public void iterate(){
        updateTargets();
        updateModel();
        updateProbabilities();
    }

    // E-step over all data points (parallel).
    private void updateTargets(){
        if (logger.isDebugEnabled()){
            logger.debug("start updateTargets()");
        }
        IntStream.range(0, dataSet.getNumDataPoints()).parallel().forEach(this::updateTargets);
        if (logger.isDebugEnabled()){
            logger.debug("finish updateTargets()");
        }
    }

    // M-step: fit the CRF to the soft targets with LBFGS on the KL loss.
    private void updateModel(){
        if (logger.isDebugEnabled()){
            logger.debug("start updateModel()");
        }
        KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
        LBFGS lbfgs = new LBFGS(klLoss);
        lbfgs.optimize();
        if (logger.isDebugEnabled()){
            logger.debug("finish updateModel()");
        }
    }

    // Per-data-point objective: -log(expected score under the cached distribution).
    private double objective(int dataPointIndex){
        double sum = 0;
        double[] p = probabilities[dataPointIndex];
        double[] s = scores[dataPointIndex];
        for (int j=0;j<p.length;j++){
            sum += p[j]*s[j];
        }
        return -Math.log(sum);
    }

    /**
     * Full objective: sum of per-data-point log-risk terms plus the
     * regularization penalty. Uses the cached probabilities, so it reflects
     * the model state as of the last updateProbabilities() call.
     */
    public double objective(){
        if (logger.isDebugEnabled()){
            logger.debug("start objective()");
        }
        double obj= IntStream.range(0, dataSet.getNumDataPoints()).parallel()
                .mapToDouble(this::objective).sum();
        if (logger.isDebugEnabled()){
            logger.debug("finish obj");
        }
        double penalty = penalty();
        if (logger.isDebugEnabled()){
            logger.debug("finish penalty");
        }
        if (logger.isDebugEnabled()){
            logger.debug("finish objective()");
        }
        return obj+penalty;
    }

    // regularization
    // Delegates to KLLoss so the penalty definition lives in one place.
    private double penalty(){
        KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
        return klLoss.getPenalty();
    }
}
package edu.ucar.unidata.rosetta.init.resources; import edu.ucar.unidata.rosetta.domain.resources.*; import edu.ucar.unidata.rosetta.exceptions.RosettaDataException; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; import org.springframework.dao.NonTransientDataAccessResourceException; import java.io.File; import java.sql.*; import java.util.*; /** * An implementation of the DbInitManager that creates the default embedded derby database for the rosetta application. * * @author oxelson@ucar.edu */ public class EmbeddedDerbyDbInitManager implements DbInitManager { private static final Logger logger = Logger.getLogger(EmbeddedDerbyDbInitManager.class); /** * Adds default admin user during database creation. * * @param props RosettaProperties from which the database username and password are glean. * @throws NonTransientDataAccessResourceException If unable to create instance of the database * driver. * @throws SQLException If an SQL exceptions occurs during insert transaction. 
*/ private void addDefaultAdminUser(Properties props) throws NonTransientDataAccessResourceException, SQLException { Connection connection; PreparedStatement preparedStatement; try { Class.forName(props.getProperty("jdbc.driverClassName")); } catch (ClassNotFoundException e) { throw new NonTransientDataAccessResourceException( "Unable to find database drive class: " + e); } String username = StringUtils.stripToNull(props.getProperty("jdbc.username")); String password = StringUtils.stripToNull(props.getProperty("jdbc.password")); String url = props.getProperty("jdbc.url"); if (username != null && password != null) { connection = DriverManager.getConnection(url, username, password); } else { connection = DriverManager.getConnection(url); } String statement = "INSERT INTO users " + "(userName, password, accessLevel, accountStatus, emailAddress, fullName, dateCreated, dateModified) VALUES " + "(?,?,?,?,?,?,?,?)"; preparedStatement = connection.prepareStatement(statement); preparedStatement.setString(1, "admin"); preparedStatement.setString(2, "$2a$10$gJ4ITtIMNpxsU0xmx6qoE.0MGZ2fv8HpoaL1IlgNdhBlUgmcVwRDO"); preparedStatement.setInt(3, 2); preparedStatement.setInt(4, 1); preparedStatement.setString(5, "admin@foo.bar.baz"); preparedStatement.setString(6, "Rosetta Administrator"); preparedStatement.setTimestamp(7, new Timestamp(System.currentTimeMillis())); preparedStatement.setTimestamp(8, new Timestamp(System.currentTimeMillis())); preparedStatement.executeUpdate(); // Clean up. if (preparedStatement != null) { preparedStatement.close(); } if (connection != null) { connection.close(); } } /** * Creates the embedded derby database for the rosetta. * * @param props RosettaProperties used to create the database. * @throws NonTransientDataAccessResourceException If unable to create or access the database. * @throws SQLException If an SQL exceptions occurs during database creation. * @throws RosettaDataException If unable to access the rosetta resources to persist. 
*/ @Override public void createDatabase(Properties props) throws NonTransientDataAccessResourceException, SQLException, RosettaDataException { // Get relevant properties. String rosettaHome = props.getProperty("rosetta.home"); String databaseName = props.getProperty("jdbc.dbName"); String url = props.getProperty("jdbc.url") + rosettaHome + "/" + databaseName; props.setProperty("jdbc.url", url); // Create derby database file. File dbFile = new File(FilenameUtils.concat(rosettaHome, databaseName)); Connection connection; if (!dbFile.exists()) { logger.info("Database does not exist yet. Creating..."); // Create the database tables; String createPropertiesTable = "CREATE TABLE properties " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "propertyKey VARCHAR(255) not null, " + "propertyValue VARCHAR(255) not null, " + "dateCreated TIMESTAMP not null" + ")"; createTable(createPropertiesTable, props); // Populate properties table. populatePropertiesTable(props); // Table containing uploaded file data. 
String createUploadedFileTable = "CREATE TABLE uploadedFiles " + "(" + "id VARCHAR(255) not null, " + "fileName VARCHAR(255) not null, " + "fileType VARCHAR(50) not null" + ")"; createTable(createUploadedFileTable, props); String createWizardDataTable = "CREATE TABLE wizardData " + "(" + "id VARCHAR(255) primary key not null, " + "cfType VARCHAR(50), " + "community VARCHAR(100), " + "metadataProfile VARCHAR(255), " + "platform VARCHAR(100), " + "dataFileType VARCHAR(255), " + "headerLineNumbers VARCHAR(255), " + "delimiter VARCHAR(255)" + ")"; createTable(createWizardDataTable, props); String createVariablesTable = "CREATE TABLE variables " + "(" + "variableId INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "wizardDataId VARCHAR(255)," + "columnNumber INT, " + "variableName VARCHAR(255), " + "metadataType VARCHAR(14), " + "metadataTypeStructure VARCHAR(20), " + "verticalDirection VARCHAR(4), " + "metadataValueType VARCHAR(10)" + ")"; createTable(createVariablesTable, props); String createVariableMetadataTable = "CREATE TABLE variableMetadata " + "(" + "variableId INT, " + "complianceLevel VARCHAR(255), " + "metadataKey VARCHAR(255), " + "metadataValue VARCHAR(255)" + ")"; createTable(createVariableMetadataTable, props); String createGlobalMetadataTable = "CREATE TABLE globalMetadata " + "(" + "wizardDataId VARCHAR(255)," + "metadataGroup VARCHAR(255), " + "metadataValueType VARCHAR(255), " + "metadataKey VARCHAR(255), " + "metadataValue VARCHAR(255)" + ")"; createTable(createGlobalMetadataTable, props); String createMetadataProfileDataTable = "CREATE TABLE metadataProfileData " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "attributeName VARCHAR(100), " + "complianceLevel VARCHAR(12), " + "description CLOB(64000), " + "displayName VARCHAR(255), " + "exampleValues CLOB(64000), " + "metadataGroup VARCHAR(255), " + "metadataProfileName VARCHAR(20), " + 
"metadataProfileVersion VARCHAR(20), " + "metadataType VARCHAR(255), " + "metadataTypeStructureName VARCHAR(255), " + "metadataValueType VARCHAR(255)" + ")"; createTable(createMetadataProfileDataTable, props); String createIgnoreListTable = "CREATE TABLE ignoreList " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "metadataType VARCHAR(255), " + "attributeName VARCHAR(100)" + ")"; createTable(createIgnoreListTable, props); insertMetadataProfiles(props); String createPlatformTable = "CREATE TABLE platforms " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "name VARCHAR(255), " + "imgPath VARCHAR(255), " + "cfType INTEGER, " + "community INTEGER" + ")"; createTable(createPlatformTable, props); String createFileTypeTable = "CREATE TABLE fileTypes " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "name VARCHAR(255)" + ")"; createTable(createFileTypeTable, props); String createCfTypeTable = "CREATE TABLE cfTypes " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "name VARCHAR(255)" + ")"; createTable(createCfTypeTable, props); String createMetadataProfileTable = "CREATE TABLE metadataProfiles " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "name VARCHAR(10), " + "community INTEGER" + ")"; createTable(createMetadataProfileTable, props); String createCommunityTable = "CREATE TABLE communities " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "name VARCHAR(255), " + "fileType INTEGER " + ")"; createTable(createCommunityTable, props); String createDelimiterTable = "CREATE TABLE delimiters " + "(" + "id INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "name VARCHAR(255), " + 
"characterSymbol VARCHAR(10)" + ")"; createTable(createDelimiterTable, props); // Insert the resources into the db. insertResources(props); String createUsersTable = "CREATE TABLE users" + "(" + "userId INTEGER primary key not null GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " + "userName VARCHAR(50) not null, " + "password VARCHAR(80) not null, " + "accessLevel INTEGER not null, " + "accountStatus INTEGER not null, " + "emailAddress VARCHAR(75) not null, " + "fullName VARCHAR(100) not null, " + "dateCreated TIMESTAMP not null, " + "dateModified TIMESTAMP not null" + ")"; createTable(createUsersTable, props); // Add default admin user to users table. addDefaultAdminUser(props); // Okay, we're done. Shut down this particular connection to the database. try { connection = DriverManager.getConnection(url + ";shutdown=true"); if (connection != null) { connection.close(); } } catch (SQLException e) { // As per the Derby docs, the shutdown commands always raise SQLExceptions. (lame!) logger.info("Finished creating database. Shutting down database..."); } } else { // Update existing properties table. populatePropertiesTable(props); logger.info("Nothing to do here... Database already exists."); } } /** * Creates a table in the derby database. * * @param statement The create SQL statement. * @param props RosettaProperties from which the database username and password are glean. * @throws NonTransientDataAccessResourceException If unable to create instance of the database * driver. * @throws SQLException If an SQL exceptions occurs during create table transaction. 
*/ private void createTable(String statement, Properties props) throws NonTransientDataAccessResourceException, SQLException { Connection connection; PreparedStatement preparedStatement; try { Class.forName(props.getProperty("jdbc.driverClassName")); } catch (ClassNotFoundException e) { throw new NonTransientDataAccessResourceException( "Unable to find database drive class: " + e); } String username = StringUtils.stripToNull(props.getProperty("jdbc.username")); String password = StringUtils.stripToNull(props.getProperty("jdbc.password")); String url = props.getProperty("jdbc.url") + ";create=true"; if (username != null && password != null) { connection = DriverManager.getConnection(url, username, password); } else { connection = DriverManager.getConnection(url); } preparedStatement = connection.prepareStatement(statement); preparedStatement.executeUpdate(); // Clean up. if (preparedStatement != null) { preparedStatement.close(); } if (connection != null) { connection.close(); } } /** * * @param props * @throws SQLException * @throws RosettaDataException */ private void insertMetadataProfiles(Properties props) throws SQLException, RosettaDataException { Connection connection; PreparedStatement preparedStatement = null; try { Class.forName(props.getProperty("jdbc.driverClassName")); } catch (ClassNotFoundException e) { throw new NonTransientDataAccessResourceException("Unable to find database drive class: " + e); } String username = StringUtils.stripToNull(props.getProperty("jdbc.username")); String password = StringUtils.stripToNull(props.getProperty("jdbc.password")); String url = props.getProperty("jdbc.url"); if (username != null && password != null) { connection = DriverManager.getConnection(url, username, password); } else { connection = DriverManager.getConnection(url); } String insertStatement = "INSERT INTO metadataProfileData (" + "attributeName, complianceLevel, description, exampleValues, " + "metadataGroup, metadataProfileName, metadataProfileVersion, " 
+ "metadataType, metadataTypeStructureName, metadataValueType) " + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; MetadataProfileLoader metadataProfileLoader = new MetadataProfileLoader(); List<edu.ucar.unidata.rosetta.domain.MetadataProfile> metadataProfiles = metadataProfileLoader.loadMetadataProfiles(); for (edu.ucar.unidata.rosetta.domain.MetadataProfile metadataProfile : metadataProfiles) { preparedStatement = connection.prepareStatement(insertStatement); preparedStatement.setString(1, metadataProfile.getAttributeName()); preparedStatement.setString(2, metadataProfile.getComplianceLevel()); preparedStatement.setString(3, metadataProfile.getDescription()); preparedStatement.setString(4, metadataProfile.getExampleValues()); preparedStatement.setString(5, metadataProfile.getMetadataGroup()); preparedStatement.setString(6, metadataProfile.getMetadataProfileName()); preparedStatement.setString(7, metadataProfile.getMetadataProfileVersion()); preparedStatement.setString(8, metadataProfile.getMetadataType()); preparedStatement.setString(9, metadataProfile.getMetadataTypeStructureName()); preparedStatement.setString(10, metadataProfile.getMetadataValueType()); preparedStatement.executeUpdate(); } String[] ignoreListValues = { "CoordinateVariable=axis", "CoordinateVariable=coverage_content_type", "CoordinateVariable=_FillValue", "CoordinateVariable=valid_min", "CoordinateVariable=valid_min", "CoordinateVariable=valid_max", "DataVariable=_FillValue", "DataVariable=coordinates", "DataVariable=coverage_content_type", "DataVariable=valid_min", "DataVariable=valid_max", "Global=featureType", "Global=conventions", "MetadataGroup=geospatial_lat_start", "MetadataGroup=geospatial_lon_start", "MetadataGroup=time_coverage_start", "MetadataGroup=geospatial_lat_end", "MetadataGroup=geospatial_lon_end", "MetadataGroup=time_coverage_end" }; insertStatement = "INSERT INTO ignoreList (metadataType, attributeName) VALUES (?, ?)"; for (int i = 0; i < ignoreListValues.length; i++) { 
preparedStatement = connection.prepareStatement(insertStatement); String[] ignore = ignoreListValues[i].split("="); preparedStatement.setString(1, ignore[0]); preparedStatement.setString(2, ignore[1]); preparedStatement.executeUpdate(); } } /** * Inserts the rosetta-specific resources glean from xml files into the database. * * @param props RosettaProperties from which the database username and password are glean. * @throws NonTransientDataAccessResourceException If unable to create instance of database * driver. * @throws SQLException If an SQL exceptions occurs during insert transaction. * @throws RosettaDataException If unable to access the resource to persist. */ private void insertResources(Properties props) throws NonTransientDataAccessResourceException, SQLException, RosettaDataException { Connection connection; PreparedStatement preparedStatement = null; try { Class.forName(props.getProperty("jdbc.driverClassName")); } catch (ClassNotFoundException e) { throw new NonTransientDataAccessResourceException( "Unable to find database drive class: " + e); } String username = StringUtils.stripToNull(props.getProperty("jdbc.username")); String password = StringUtils.stripToNull(props.getProperty("jdbc.password")); String url = props.getProperty("jdbc.url"); if (username != null && password != null) { connection = DriverManager.getConnection(url, username, password); } else { connection = DriverManager.getConnection(url); } // Define our statements for the various resource types. 
// SQL INSERT templates for each resource lookup table (positional JDBC parameters).
String delimiterStatement = "INSERT INTO delimiters (name, characterSymbol) VALUES (?, ?)";
String cfTypeStatement = "INSERT INTO cfTypes (name) VALUES (?)";
String fileTypeStatement = "INSERT INTO fileTypes (name) VALUES (?)";
String metadataProfileStatement = "INSERT INTO metadataProfiles (name, community) VALUES (?, ?)";
String platformStatement = "INSERT INTO platforms (name, imgPath, cfType, community) VALUES (?, ?, ?, ?)";
String communityStatement = "INSERT INTO communities (name, fileType) VALUES (?, ?)";

// Load the bundled resources and persist each one according to its concrete type.
// NOTE(review): 'connection' and 'preparedStatement' are declared earlier in this
// method (above this chunk); each branch below reuses 'preparedStatement'.
ResourceLoader resourceManager = new ResourceLoader();
List<RosettaResource> resources = resourceManager.loadResources();
for (RosettaResource resource : resources) {
    // Set the resources depending on the type.
    if (resource instanceof CfType) {
        // CF type resource: name only.
        preparedStatement = connection.prepareStatement(cfTypeStatement);
        preparedStatement.setString(1, resource.getName());
        preparedStatement.executeUpdate();
    } else if (resource instanceof Delimiter) {
        // Delimiter resource: name plus its character symbol.
        preparedStatement = connection.prepareStatement(delimiterStatement);
        preparedStatement.setString(1, resource.getName());
        preparedStatement.setString(2, ((Delimiter) resource).getCharacterSymbol());
        preparedStatement.executeUpdate();
    } else if (resource instanceof FileType) {
        // File type resource: name only.
        preparedStatement = connection.prepareStatement(fileTypeStatement);
        preparedStatement.setString(1, resource.getName());
        preparedStatement.executeUpdate();
    } else if (resource instanceof Platform) {
        // Platform resource references cfTypes and communities by foreign key.
        // Get the primary key values for the cfTypes and stash them in a map for quick access.
        Map<String, Integer> cfTypeMap = new HashMap<>();
        String getCfTypeStatement = "SELECT * FROM cfTypes";
        preparedStatement = connection.prepareStatement(getCfTypeStatement);
        ResultSet rs = preparedStatement.executeQuery();
        while (rs.next()) {
            int id = rs.getInt("id");
            String name = rs.getString("name");
            cfTypeMap.put(name, id);
        }
        // Get the primary key values for the communities and stash them in a map for quick access.
        // DISTINCT because a community may appear once per associated file type.
        Map<String, Integer> communityMap = new HashMap<>();
        String getCommunityStatement = "SELECT DISTINCT id, name FROM communities";
        preparedStatement = connection.prepareStatement(getCommunityStatement);
        rs = preparedStatement.executeQuery();
        while (rs.next()) {
            int id = rs.getInt("id");
            String name = rs.getString("name");
            communityMap.put(name, id);
        }
        // Insert the platform, resolving cfType and community names to their primary keys.
        preparedStatement = connection.prepareStatement(platformStatement);
        preparedStatement.setString(1, resource.getName());
        preparedStatement.setString(2, ((Platform) resource).getImgPath());
        preparedStatement.setInt(3, cfTypeMap.get(((Platform) resource).getCfType()));
        preparedStatement.setInt(4, communityMap.get(((Platform) resource).getCommunity()));
        preparedStatement.executeUpdate();
    } else if (resource instanceof Community) {
        // Community resource: one row per associated file type.
        // Get the primary key values for the file types and stash them in a map for quick access.
        Map<String, Integer> fileTypeMap = new HashMap<>();
        String getFileTypeStatement = "SELECT * FROM fileTypes";
        preparedStatement = connection.prepareStatement(getFileTypeStatement);
        ResultSet rs = preparedStatement.executeQuery();
        while (rs.next()) {
            int id = rs.getInt("id");
            String name = rs.getString("name");
            fileTypeMap.put(name, id);
        }
        // Create an entry in the communities table for all of the file types.
        List<String> fileTypes = ((Community) resource).getFileType();
        for (String fileType : fileTypes) {
            preparedStatement = connection.prepareStatement(communityStatement);
            preparedStatement.setString(1, resource.getName());
            preparedStatement.setInt(2, fileTypeMap.get(fileType));
            preparedStatement.executeUpdate();
        }
    } else {
        // Metadata profile resource: one row per associated community.
        // Get the primary key values for the communities and stash them in a map for quick access.
        Map<String, Integer> communityMap = new HashMap<>();
        String getCommunityStatement = "SELECT DISTINCT id, name FROM communities";
        preparedStatement = connection.prepareStatement(getCommunityStatement);
        ResultSet rs = preparedStatement.executeQuery();
        while (rs.next()) {
            int id = rs.getInt("id");
            String name = rs.getString("name");
            communityMap.put(name, id);
        }
        // Create an entry in the metadata profiles table for all of the communities.
        List<Community> communities = ((MetadataProfile) resource).getCommunities();
        for (Community community : communities) {
            preparedStatement = connection.prepareStatement(metadataProfileStatement);
            preparedStatement.setString(1, resource.getName());
            preparedStatement.setInt(2, communityMap.get(community.getName()));
            preparedStatement.executeUpdate();
        }
    }
}
// Clean up. NOTE(review): only the most recently created PreparedStatement is
// closed, and nothing is closed if an SQLException is thrown above —
// try-with-resources would be safer. Flagged for a future refactor.
if (preparedStatement != null) {
    preparedStatement.close();
}
if (connection != null) {
    connection.close();
}
}

/**
 * Populates a table with configuration properties data.
 *
 * @param props RosettaProperties from which the database username and password are gleaned.
 * @throws NonTransientDataAccessResourceException If unable to create instance of the database
 *     driver.
 * @throws SQLException If an SQL exception occurs during insert transaction.
*/
private void populatePropertiesTable(Properties props)
        throws NonTransientDataAccessResourceException, SQLException {
    Connection connection;
    PreparedStatement preparedStatement;
    // Load the JDBC driver class named in the application properties.
    try {
        Class.forName(props.getProperty("jdbc.driverClassName"));
    } catch (ClassNotFoundException e) {
        // Fixed message typo: "drive" -> "driver".
        throw new NonTransientDataAccessResourceException(
                "Unable to find database driver class: " + e);
    }
    String username = StringUtils.stripToNull(props.getProperty("jdbc.username"));
    String password = StringUtils.stripToNull(props.getProperty("jdbc.password"));
    String url = props.getProperty("jdbc.url");
    // Credentials are optional; connect without them when either is absent.
    if (username != null && password != null) {
        connection = DriverManager.getConnection(url, username, password);
    } else {
        connection = DriverManager.getConnection(url);
    }

    // See if properties have already been persisted prior to this time.
    String statement = "SELECT * FROM properties";
    preparedStatement = connection.prepareStatement(statement);
    ResultSet rs = preparedStatement.executeQuery();
    Map<String, String> propertiesMap = new HashMap<>();
    while (rs.next()) {
        propertiesMap.put(rs.getString("propertyKey"), rs.getString("propertyValue"));
    }

    // Persist the property data. If a property is already persisted, compare its value to what is
    // stored in the database, log any difference, and update the persisted value if necessary.
    // TODO: In future, notify admin of these differences via interface and let him/her sort it out.
    Enumeration<?> propertyNames = props.propertyNames();
    while (propertyNames.hasMoreElements()) {
        String key = (String) propertyNames.nextElement();
        String value = props.getProperty(key);
        if (propertiesMap.containsKey(key)) {
            // Property has already been persisted. See if persisted value matches the properties.
            if (!propertiesMap.get(key).equals(value)) {
                // Persisted data has a different value than the properties file. Update it.
                logger.info("Persisted " + key + " to be changed from " + propertiesMap.get(key)
                        + " to " + value);
                // BUG FIX: the SET clause previously joined assignments with AND
                // ("propertyValue = ? AND dateCreated =?"), which is not a valid SET list —
                // assignments in a SET clause must be comma-separated.
                statement = "UPDATE properties SET propertyKey = ?, propertyValue = ?, "
                        + "dateCreated = ? WHERE propertyKey = ?";
                preparedStatement = connection.prepareStatement(statement);
                preparedStatement.setString(1, key);
                preparedStatement.setString(2, value);
                preparedStatement.setTimestamp(3, new Timestamp(System.currentTimeMillis()));
                preparedStatement.setString(4, key);
                preparedStatement.executeUpdate();
            }
        } else {
            // Property has NOT been persisted before. Add it.
            statement = "INSERT INTO properties(propertyKey, propertyValue, dateCreated) "
                    + "VALUES (?,?,?)";
            preparedStatement = connection.prepareStatement(statement);
            preparedStatement.setString(1, key);
            preparedStatement.setString(2, value);
            preparedStatement.setTimestamp(3, new Timestamp(System.currentTimeMillis()));
            preparedStatement.executeUpdate();
        }
    }

    // Clean up.
    if (preparedStatement != null) {
        preparedStatement.close();
    }
    if (connection != null) {
        connection.close();
    }
}

/**
 * Shuts down the embedded derby database by de-registering the driver.
 *
 * @param props RosettaProperties from application.properties that may be used for database
 *     shutdown.
 * @throws SQLException If an SQL exception occurs during database shutdown.
 */
public void shutdownDatabase(Properties props) throws SQLException {
    Connection connection;
    try {
        Class.forName(props.getProperty("jdbc.driverClassName"));
    } catch (ClassNotFoundException e) {
        // Fixed message typo: "drive" -> "driver".
        throw new NonTransientDataAccessResourceException(
                "Unable to find database driver class: " + e);
    }
    String username = StringUtils.stripToNull(props.getProperty("jdbc.username"));
    String password = StringUtils.stripToNull(props.getProperty("jdbc.password"));
    // NOTE(review): the base URL already carries ";create=true" before ";shutdown=true" is
    // appended below; Derby tolerates this, but the attributes look contradictory — confirm.
    String url = props.getProperty("jdbc.url") + ";create=true";
    // Okay, we're done. Shut down this particular connection to the database.
    try {
        if (username != null && password != null) {
            connection = DriverManager.getConnection(url + ";shutdown=true", username, password);
        } else {
            connection = DriverManager.getConnection(url + ";shutdown=true");
        }
        if (connection != null) {
            connection.close();
        }
    } catch (SQLException e) {
        // As per the Derby docs, a successful shutdown always raises an SQLException.
        logger.info("Shutting down database...");
    }
    Driver driver = DriverManager.getDriver(url);
    logger.info("De-registering jdbc driver.");
    DriverManager.deregisterDriver(driver);
}
}
package eu.spitfire.ssp.server.channels.handler.cache;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.file.Path;
import java.util.Date;
import java.util.concurrent.ScheduledExecutorService;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.util.concurrent.SettableFuture;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.ReadWrite;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.tdb.TDB;
import com.hp.hpl.jena.tdb.TDBFactory;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;

import eu.spitfire.ssp.backends.generic.messages.InternalResourceStatusMessage;

/**
 * A {@link SemanticCache} implementation backed by an on-disk Jena TDB dataset.
 * Each cached resource is stored as a named model keyed by the resource URI.
 * SPARQL queries run against the union of all named graphs (symUnionDefaultGraph).
 */
public class JenaTdbSemanticCache extends SemanticCache {

    private static final String SPT_SOURCE = "http://spitfire-project.eu/ontology.rdf";
    private static final String SPT_NS = "http://spitfire-project.eu/ontology/ns/";
    private static final String SPTSN_SOURCE = "http://spitfire-project.eu/sn.rdf";

    // Loaded at most once per JVM; shared by every cache instance.
    private static OntModel ontologyBaseModel = null;

    private Logger log = LoggerFactory.getLogger(this.getClass().getName());

    private Dataset dataset;

    /**
     * Creates the TDB-backed cache.
     *
     * @param scheduledExecutorService executor handed to the {@link SemanticCache} superclass
     * @param dbDirectory directory holding (or to hold) the TDB database files
     */
    public JenaTdbSemanticCache(ScheduledExecutorService scheduledExecutorService, Path dbDirectory) {
        super(scheduledExecutorService);

        dataset = TDBFactory.createDataset(dbDirectory.toString());
        // Make queries against the default graph see the union of all named graphs.
        TDB.getContext().set(TDB.symUnionDefaultGraph, true);

        // Collect the SPITFIRE vocabularies (best-effort: skipped when unreachable).
        if (ontologyBaseModel == null) {
            ontologyBaseModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
            if (isUriAccessible(SPT_SOURCE)) {
                ontologyBaseModel.read(SPT_SOURCE, "RDF/XML");
            }
            if (isUriAccessible(SPTSN_SOURCE)) {
                ontologyBaseModel.read(SPTSN_SOURCE, "RDF/XML");
            }
        }
        // Wrap the vocabularies in a micro-rule inference model and store it as a named graph.
        Model owlFullModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF);
        owlFullModel.add(ontologyBaseModel);
        // NOTE(review): this write happens outside a TDB transaction — confirm this is intended.
        dataset.addNamedModel(SPT_NS, owlFullModel);
    }

    /**
     * Probes a URI with an HTTP GET (1 s connect timeout).
     *
     * @return {@code true} iff the server answered with status 200
     */
    private static boolean isUriAccessible(String uri) {
        HttpURLConnection connection = null;
        int code = -1;
        try {
            URL url = new URL(uri);
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setConnectTimeout(1000);
            code = connection.getResponseCode();
        } catch (IOException e) {
            // MalformedURLException and ProtocolException are both IOExceptions, so a single
            // catch replaces the three duplicate handlers the original had.
            System.err.println(uri + " is not accessible.");
        } finally {
            // Release the connection; the original leaked it.
            if (connection != null) {
                connection.disconnect();
            }
        }
        return code == 200;
    }

    /**
     * Looks up the cached status model for a resource.
     *
     * @return the cached status wrapped with the current timestamp, or {@code null} when no
     *     (non-empty) model is cached for the URI
     */
    @Override
    public InternalResourceStatusMessage getCachedResource(URI resourceUri) throws Exception {
        dataset.begin(ReadWrite.READ);
        try {
            Model model = dataset.getNamedModel(resourceUri.toString());
            if (model.isEmpty()) {
                log.warn("No cached status found for resource {}", resourceUri);
                return null;
            }
            log.info("Cached status found for resource {}", resourceUri);
            return new InternalResourceStatusMessage(model, new Date());
        } finally {
            dataset.end();
        }
    }

    /**
     * Replaces the cached status for a resource: any previous model is deleted first, then the
     * new model is committed under the resource URI.
     */
    @Override
    public void putResourceToCache(URI resourceUri, Model resourceStatus) throws Exception {
        deleteResource(resourceUri);

        dataset.begin(ReadWrite.WRITE);
        try {
            dataset.addNamedModel(resourceUri.toString(), resourceStatus);
            dataset.commit();
            log.debug("Added status for resource {}", resourceUri);
        } finally {
            dataset.end();
        }
    }

    /** Removes the cached status model (if any) for the given resource URI. */
    @Override
    public void deleteResource(URI resourceUri) throws Exception {
        dataset.begin(ReadWrite.WRITE);
        try {
            dataset.removeNamedModel(resourceUri.toString());
            dataset.commit();
            log.debug("Removed status for resource {}", resourceUri);
        } finally {
            dataset.end();
        }
    }

    /**
     * Updates a single property of a cached resource in place. Values of the SPITFIRE
     * "value" predicate are coerced to xsd:float; other objects are stored as given.
     * Does nothing (beyond a warning) when the resource is not yet cached.
     */
    @Override
    public void updateStatement(Statement statement) throws Exception {
        dataset.begin(ReadWrite.WRITE);
        try {
            Model tdbModel = dataset.getNamedModel(statement.getSubject().toString());
            Statement oldStatement =
                    tdbModel.getProperty(statement.getSubject(), statement.getPredicate());
            Statement updatedStatement;
            if (oldStatement != null) {
                if ("http://spitfire-project.eu/ontology/ns/value"
                        .equals(oldStatement.getPredicate().toString())) {
                    RDFNode object = tdbModel.createTypedLiteral(
                            statement.getObject().asLiteral().getFloat(), XSDDatatype.XSDfloat);
                    updatedStatement = oldStatement.changeObject(object);
                    dataset.commit();
                } else {
                    updatedStatement = oldStatement.changeObject(statement.getObject());
                    dataset.commit();
                }
                log.info("Updated property {} of resource {} to {}",
                        new Object[]{updatedStatement.getPredicate(),
                                updatedStatement.getSubject(), updatedStatement.getObject()});
            } else
                log.warn("Resource {} not (yet?) found. Could not update property {}.",
                        statement.getSubject(), statement.getPredicate());
        } finally {
            dataset.end();
        }
    }

    /**
     * Executes a SPARQL SELECT query against the dataset and completes the given future with
     * the XML-formatted result set (or with the exception on failure).
     */
    public synchronized void processSparqlQuery(SettableFuture<String> queryResultFuture,
                                                String sparqlQuery) {
        dataset.begin(ReadWrite.READ);
        try {
            log.info("Start SPARQL query processing: {}", sparqlQuery);
            Query query = QueryFactory.create(sparqlQuery);
            QueryExecution queryExecution = QueryExecutionFactory.create(query, dataset);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            try {
                ResultSet resultSet = queryExecution.execSelect();
                ResultSetFormatter.outputAsXML(baos, resultSet);
            } finally {
                queryExecution.close();
            }
            String result = baos.toString("UTF-8");
            queryResultFuture.set(result);
        } catch (Exception e) {
            queryResultFuture.setException(e);
        } finally {
            dataset.end();
        }
    }

    /** This cache supports SPARQL queries. */
    @Override
    public boolean supportsSPARQL() {
        return true;
    }
}
package hudson.plugins.scm_sync_configuration;

import com.google.common.io.Files;
import hudson.model.Hudson;
import hudson.model.User;
import hudson.plugins.scm_sync_configuration.exceptions.LoggableException;
import hudson.plugins.scm_sync_configuration.model.*;
import hudson.plugins.scm_sync_configuration.strategies.ScmSyncStrategy;
import hudson.plugins.scm_sync_configuration.utils.Checksums;
import hudson.util.DaemonThreadFactory;
import org.apache.commons.io.FileUtils;
import org.apache.maven.scm.ScmException;
import org.apache.maven.scm.manager.ScmManager;
import org.codehaus.plexus.PlexusContainerException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.logging.Logger;

/**
 * Business layer of the scm-sync-configuration plugin: maintains a local SCM checkout of
 * the Jenkins configuration, queues changesets, and commits them to the remote SCM.
 */
public class ScmSyncConfigurationBusiness {

    private static final String WORKING_DIRECTORY_PATH = "/scm-sync-configuration/";
    private static final String CHECKOUT_SCM_DIRECTORY = "checkoutConfiguration";
    private static final Logger LOGGER = Logger.getLogger(ScmSyncConfigurationBusiness.class.getName());

    private boolean checkoutSucceeded;
    private SCMManipulator scmManipulator;
    private File checkoutScmDirectory = null;
    private ScmSyncConfigurationStatusManager scmSyncConfigurationStatusManager = null;

    /**
     * Use of a size 1 thread pool frees us from worrying about accidental thread death and
     * changeset commit concurrency
     */
    /*package*/ final ExecutorService writer = Executors.newFixedThreadPool(1, new DaemonThreadFactory());

    // TODO: Refactor this into the plugin object ???
    private List<Commit> commitsQueue = Collections.synchronizedList(new ArrayList<Commit>());

    public ScmSyncConfigurationBusiness(){
    }

    /** Lazily creates and returns the status manager used to report success/failure. */
    public ScmSyncConfigurationStatusManager getScmSyncConfigurationStatusManager() {
        if (scmSyncConfigurationStatusManager == null) {
            scmSyncConfigurationStatusManager = new ScmSyncConfigurationStatusManager();
        }
        return scmSyncConfigurationStatusManager;
    }

    /**
     * Creates the SCM manipulator and performs the initial checkout.
     *
     * @throws ComponentLookupException if the Plexus SCM manager cannot be looked up
     * @throws PlexusContainerException if the Plexus container cannot be created
     */
    public void init(ScmContext scmContext) throws ComponentLookupException, PlexusContainerException {
        ScmManager scmManager = SCMManagerFactory.getInstance().createScmManager();
        this.scmManipulator = new SCMManipulator(scmManager);
        this.checkoutScmDirectory = new File(getCheckoutScmDirectoryAbsolutePath());
        this.checkoutSucceeded = false;
        initializeRepository(scmContext, false);
    }

    /**
     * Checks out the configured repository into the local working directory, optionally
     * wiping any previous checkout first. No-op when the SCM context is not settled up.
     */
    public void initializeRepository(ScmContext scmContext, boolean deleteCheckoutScmDir){
        // Let's check if everything is available to checkout sources
        if(scmManipulator != null && scmManipulator.scmConfigurationSettledUp(scmContext, true)){
            LOGGER.info("Initializing SCM repository for scm-sync-configuration plugin ...");
            // If checkoutScmDirectory was not empty and deleteCheckoutScmDir is asked, reinitialize it !
            if(deleteCheckoutScmDir){
                cleanChekoutScmDirectory();
            }
            // Creating checkout scm directory
            if(!checkoutScmDirectory.exists()){
                try {
                    FileUtils.forceMkdir(checkoutScmDirectory);
                    LOGGER.info("Directory ["+ checkoutScmDirectory.getAbsolutePath() +"] created !");
                } catch (IOException e) {
                    LOGGER.warning("Directory ["+ checkoutScmDirectory.getAbsolutePath() +"] cannot be created !");
                }
            }
            this.checkoutSucceeded = this.scmManipulator.checkout(this.checkoutScmDirectory);
            if(this.checkoutSucceeded){
                LOGGER.info("SCM repository initialization done.");
            }
            signal("Checkout " + this.checkoutScmDirectory, this.checkoutSucceeded);
        }
    }

    /**
     * Deletes the local checkout directory (if present) and marks the checkout as failed.
     * NOTE(review): method name keeps its historical misspelling ("Chekout") because it is
     * part of the public API.
     */
    public void cleanChekoutScmDirectory(){
        if(checkoutScmDirectory != null && checkoutScmDirectory.exists()){
            LOGGER.info("Deleting old checkout SCM directory ...");
            try {
                FileUtils.forceDelete(checkoutScmDirectory);
            } catch (IOException e) {
                LOGGER.throwing(FileUtils.class.getName(), "forceDelete", e);
                LOGGER.severe("Error while deleting ["+checkoutScmDirectory.getAbsolutePath()+"] : "+e.getMessage());
            }
            this.checkoutSucceeded = false;
        }
    }

    /**
     * Deletes a file hierarchy both in the SCM and on disk.
     *
     * @return the files whose deletion must be committed, or {@code null} when the SCM
     *     context is not settled up — callers must handle the null case
     */
    public List<File> deleteHierarchy(ScmContext scmContext, Path hierarchyPath){
        if(scmManipulator == null || !scmManipulator.scmConfigurationSettledUp(scmContext, false)){
            return null;
        }
        File rootHierarchyTranslatedInScm = hierarchyPath.getScmFile();
        List<File> filesToCommit = scmManipulator.deleteHierarchy(rootHierarchyTranslatedInScm);
        // Once done, we should delete path in scm if it is a directory
        if(hierarchyPath.isDirectory()){
            try {
                FileUtils.deleteDirectory(rootHierarchyTranslatedInScm);
            } catch (IOException e) {
                throw new LoggableException("Failed to recursively delete scm directory "+rootHierarchyTranslatedInScm.getAbsolutePath(),
                        FileUtils.class, "deleteDirectory", e);
            }
        }
        signal("Delete " + hierarchyPath, filesToCommit != null);
        return filesToCommit;
    }

    /**
     * Enqueues a changeset and schedules asynchronous processing of the commit queue.
     *
     * @return a future completing when the queue has been processed, or {@code null} when
     *     the SCM context is not settled up
     */
    public Future<Void> queueChangeSet(final ScmContext scmContext, ChangeSet changeset, User user, String userMessage) {
        if(scmManipulator == null || !scmManipulator.scmConfigurationSettledUp(scmContext, false)){
            LOGGER.info("Queue of changeset "+changeset.toString()+" aborted (scm manipulator not settled !)");
            return null;
        }
        Commit commit = new Commit(changeset, user, userMessage, scmContext);
        LOGGER.finest("Queuing commit "+commit.toString()+" to SCM ...");
        commitsQueue.add(commit);

        return writer.submit(new Callable<Void>() {
            public Void call() throws Exception {
                processCommitsQueue();
                return null;
            }
        });
    }

    /**
     * Drains a snapshot of the commit queue: stages added/updated/deleted files in the local
     * checkout, commits them, and removes successfully pushed commits from the queue.
     * Aborts queue processing on the first commit failure.
     */
    private void processCommitsQueue() {
        File scmRoot = new File(getCheckoutScmDirectoryAbsolutePath());
        // Copying shared commitQueue in order to allow concurrent modification
        List<Commit> currentCommitQueue = new ArrayList<Commit>(commitsQueue);
        List<Commit> checkedInCommits = new ArrayList<Commit>();
        try {
            // Reading commit queue and commiting changeset
            for(Commit commit: currentCommitQueue){
                String logMessage = "Processing commit : " + commit.toString();
                LOGGER.finest(logMessage);

                // Preparing files to add / delete
                List<File> updatedFiles = new ArrayList<File>();
                for(Map.Entry<Path,byte[]> pathContent : commit.getChangeset().getPathContents().entrySet()){
                    Path pathRelativeToJenkinsRoot = pathContent.getKey();
                    byte[] content = pathContent.getValue();
                    File fileTranslatedInScm = pathRelativeToJenkinsRoot.getScmFile();
                    if(pathRelativeToJenkinsRoot.isDirectory()) {
                        if(!fileTranslatedInScm.exists()){
                            // Retrieving non existing parent scm path *before* copying it from jenkins directory
                            String firstNonExistingParentScmPath = pathRelativeToJenkinsRoot.getFirstNonExistingParentScmPath();
                            try {
                                FileUtils.copyDirectory(JenkinsFilesHelper.buildFileFromPathRelativeToHudsonRoot(pathRelativeToJenkinsRoot.getPath()), fileTranslatedInScm);
                            } catch (IOException e) {
                                throw new LoggableException("Error while copying file hierarchy to SCM checkouted directory", FileUtils.class, "copyDirectory", e);
                            }
                            updatedFiles.addAll(scmManipulator.addFile(scmRoot, firstNonExistingParentScmPath));
                        }
                    } else {
                        // We should remember if file in scm existed or not before any manipulation,
                        // especially writing content
                        boolean fileTranslatedInScmInitiallyExists = fileTranslatedInScm.exists();
                        boolean fileContentModified = writeScmContentOnlyIfItDiffers(pathRelativeToJenkinsRoot, content, fileTranslatedInScm);

                        if(fileTranslatedInScmInitiallyExists){
                            if(fileContentModified){
                                // No need to call scmManipulator.addFile() if fileTranslatedInScm already existed
                                updatedFiles.add(fileTranslatedInScm);
                            }
                        } else {
                            updatedFiles.addAll(scmManipulator.addFile(scmRoot, pathRelativeToJenkinsRoot.getPath()));
                        }
                    }
                }
                for(Path path : commit.getChangeset().getPathsToDelete()){
                    List<File> deletedFiles = deleteHierarchy(commit.getScmContext(), path);
                    // BUG FIX: deleteHierarchy() returns null when the SCM context is not
                    // settled up; the original addAll(null) threw a NullPointerException.
                    if(deletedFiles != null){
                        updatedFiles.addAll(deletedFiles);
                    }
                }

                if(updatedFiles.isEmpty()){
                    LOGGER.finest("Empty changeset to commit (no changes found on files) => commit skipped !");
                } else {
                    // Commiting files...
                    boolean result = scmManipulator.checkinFiles(scmRoot, commit.getMessage());
                    if(result){
                        LOGGER.finest("Commit "+commit.toString()+" pushed to SCM !");
                        checkedInCommits.add(commit);
                    } else {
                        throw new LoggableException("Error while checking in file to scm repository", SCMManipulator.class, "checkinFiles");
                    }
                    signal(logMessage, true);
                }
            }
        // As soon as a commit doesn't go well, we should abort commit queue processing...
        }catch(LoggableException e){
            LOGGER.throwing(e.getClazz().getName(), e.getMethodName(), e);
            LOGGER.severe("Error while processing commit queue : "+e.getMessage());
            signal(e.getMessage(), false);
        } finally {
            // We should remove every checkedInCommits
            commitsQueue.removeAll(checkedInCommits);
        }
    }

    /**
     * Writes {@code content} to the SCM file only when it differs from the current file
     * content (checksum comparison).
     *
     * @return {@code true} when the file was (re)written
     */
    private boolean writeScmContentOnlyIfItDiffers(Path pathRelativeToJenkinsRoot, byte[] content, File fileTranslatedInScm)
            throws LoggableException {
        boolean scmContentUpdated = false;
        boolean contentDiffer = false;
        try {
            contentDiffer = !Checksums.fileAndByteArrayContentAreEqual(fileTranslatedInScm, content);
        } catch (IOException e) {
            throw new LoggableException("Error while checking content checksum", Checksums.class, "fileAndByteArrayContentAreEqual", e);
        }
        if(contentDiffer){
            createScmContent(pathRelativeToJenkinsRoot, content, fileTranslatedInScm);
            scmContentUpdated = true;
        }
        return scmContentUpdated;
    }

    /**
     * Materializes {@code pathRelativeToJenkinsRoot} in the local checkout: creates any
     * missing parent directories, then either creates the directory or writes the file content.
     */
    private void createScmContent(Path pathRelativeToJenkinsRoot, byte[] content, File fileTranslatedInScm)
            throws LoggableException {
        Stack<File> directoriesToCreate = new Stack<File>();
        File directory = fileTranslatedInScm.getParentFile();

        // Eventually, creating non existing enclosing directories
        while(!directory.exists()){
            directoriesToCreate.push(directory);
            directory = directory.getParentFile();
        }
        while(!directoriesToCreate.empty()){
            directory = directoriesToCreate.pop();
            if(!directory.mkdir()){
                throw new LoggableException("Error while creating directory "+directory.getAbsolutePath(), File.class, "mkdir");
            }
        }

        try {
            // Copying content if pathRelativeToJenkinsRoot is a file, or creating the directory if it is a directory
            if(pathRelativeToJenkinsRoot.isDirectory()){
                if(!fileTranslatedInScm.mkdir()){
                    throw new LoggableException("Error while creating directory "+fileTranslatedInScm.getAbsolutePath(), File.class, "mkdir");
                }
            } else {
                Files.write(content, fileTranslatedInScm);
            }
        } catch (IOException e) {
            throw new LoggableException("Error while creating file in checkouted directory", Files.class, "write", e);
        }
    }

    /**
     * Registers every file produced by the given strategies in a single "Repository
     * initialization" transaction and commits it.
     */
    public void synchronizeAllConfigs(ScmSyncStrategy[] availableStrategies){
        List<File> filesToSync = new ArrayList<File>();
        // Building synced files from strategies
        for(ScmSyncStrategy strategy : availableStrategies){
            filesToSync.addAll(strategy.createInitializationSynchronizedFileset());
        }

        ScmSyncConfigurationPlugin plugin = ScmSyncConfigurationPlugin.getInstance();
        plugin.startThreadedTransaction();
        try {
            for(File fileToSync : filesToSync){
                String hudsonConfigPathRelativeToHudsonRoot = JenkinsFilesHelper.buildPathRelativeToHudsonRoot(fileToSync);
                plugin.getTransaction().defineCommitMessage(new WeightedMessage("Repository initialization", MessageWeight.IMPORTANT));
                plugin.getTransaction().registerPath(hudsonConfigPathRelativeToHudsonRoot);
            }
        } finally {
            plugin.getTransaction().commit();
        }
    }

    /** @return {@code true} when the SCM is settled up AND the initial checkout succeeded. */
    public boolean scmCheckoutDirectorySettledUp(ScmContext scmContext){
        return scmManipulator != null && this.scmManipulator.scmConfigurationSettledUp(scmContext, false) && this.checkoutSucceeded;
    }

    /**
     * Updates the local checkout from the SCM and mirrors any changed files back into the
     * Jenkins root directory.
     *
     * @return the Jenkins-side files that were created or overwritten
     */
    public List<File> reloadAllFilesFromScm() throws IOException, ScmException {
        this.scmManipulator.update(new File(getCheckoutScmDirectoryAbsolutePath()));
        return syncDirectories(new File(getCheckoutScmDirectoryAbsolutePath() + File.separator), "");
    }

    /**
     * Recursively copies files from the checkout into the Jenkins root, skipping
     * SCM-metadata entries, and returns the files that were created or updated.
     */
    private List<File> syncDirectories(File from, String relative) throws IOException {
        List<File> l = new ArrayList<File>();
        // BUG FIX: File.listFiles() returns null for non-directories / I/O errors; the
        // original dereferenced it unconditionally.
        File[] children = from.listFiles();
        if (children == null) {
            return l;
        }
        for(File f : children) {
            String newRelative = relative + File.separator + f.getName();
            File jenkinsFile = new File(Hudson.getInstance().getRootDir() + newRelative);
            if (f.getName().equals(scmManipulator.getScmSpecificFilename())) {
                // nothing to do
            }
            else if (f.isDirectory()) {
                if (!jenkinsFile.exists()) {
                    FileUtils.copyDirectory(f, jenkinsFile, new FileFilter() {
                        public boolean accept(File f) {
                            return !f.getName().equals(scmManipulator.getScmSpecificFilename());
                        }
                    });
                    l.add(jenkinsFile);
                }
                else {
                    l.addAll(syncDirectories(f, newRelative));
                }
            }
            else {
                if (!jenkinsFile.exists() || !FileUtils.contentEquals(f, jenkinsFile)) {
                    FileUtils.copyFile(f, jenkinsFile);
                    l.add(jenkinsFile);
                }
            }
        }
        return l;
    }

    /** Forwards an operation outcome to the status manager. */
    private void signal(String operation, boolean result) {
        if (result) {
            getScmSyncConfigurationStatusManager().signalSuccess();
        }
        else {
            getScmSyncConfigurationStatusManager().signalFailed(operation);
        }
    }

    /** @return absolute path of the local SCM checkout directory under the Jenkins root. */
    public static String getCheckoutScmDirectoryAbsolutePath(){
        return Hudson.getInstance().getRootDir().getAbsolutePath()+WORKING_DIRECTORY_PATH+CHECKOUT_SCM_DIRECTORY;
    }
}
package hudson.plugins.scm_sync_configuration; import com.google.common.io.Files; import hudson.model.Hudson; import hudson.model.User; import hudson.plugins.scm_sync_configuration.exceptions.LoggableException; import hudson.plugins.scm_sync_configuration.model.*; import hudson.plugins.scm_sync_configuration.strategies.ScmSyncStrategy; import hudson.plugins.scm_sync_configuration.utils.Checksums; import hudson.util.DaemonThreadFactory; import org.apache.commons.io.FileUtils; import org.apache.maven.scm.ScmException; import org.apache.maven.scm.manager.ScmManager; import org.codehaus.plexus.PlexusContainerException; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.logging.Logger; public class ScmSyncConfigurationBusiness { private static final String WORKING_DIRECTORY_PATH = "/scm-sync-configuration/"; private static final String CHECKOUT_SCM_DIRECTORY = "checkoutConfiguration"; private static final Logger LOGGER = Logger.getLogger(ScmSyncConfigurationBusiness.class.getName()); private boolean checkoutSucceeded; private SCMManipulator scmManipulator; private File checkoutScmDirectory = null; private ScmSyncConfigurationStatusManager scmSyncConfigurationStatusManager = null; /** * Use of a size 1 thread pool frees us from worrying about accidental thread death and * changeset commit concurrency */ /*package*/ final ExecutorService writer = Executors.newFixedThreadPool(1, new DaemonThreadFactory()); // TODO: Refactor this into the plugin object ??? 
private List<Commit> commitsQueue = Collections.synchronizedList(new ArrayList<Commit>()); public ScmSyncConfigurationBusiness(){ } public ScmSyncConfigurationStatusManager getScmSyncConfigurationStatusManager() { if (scmSyncConfigurationStatusManager == null) { scmSyncConfigurationStatusManager = new ScmSyncConfigurationStatusManager(); } return scmSyncConfigurationStatusManager; } public void init(ScmContext scmContext) throws ComponentLookupException, PlexusContainerException { ScmManager scmManager = SCMManagerFactory.getInstance().createScmManager(); this.scmManipulator = new SCMManipulator(scmManager); this.checkoutScmDirectory = new File(getCheckoutScmDirectoryAbsolutePath()); this.checkoutSucceeded = false; initializeRepository(scmContext, false); } public void initializeRepository(ScmContext scmContext, boolean deleteCheckoutScmDir){ // Let's check if everything is available to checkout sources if(scmManipulator != null && scmManipulator.scmConfigurationSettledUp(scmContext, true)){ LOGGER.info("Initializing SCM repository for scm-sync-configuration plugin ..."); // If checkoutScmDirectory was not empty and deleteCheckoutScmDir is asked, reinitialize it ! 
if(deleteCheckoutScmDir){ cleanChekoutScmDirectory(); } // Creating checkout scm directory if(!checkoutScmDirectory.exists()){ try { FileUtils.forceMkdir(checkoutScmDirectory); LOGGER.info("Directory ["+ checkoutScmDirectory.getAbsolutePath() +"] created !"); } catch (IOException e) { LOGGER.warning("Directory ["+ checkoutScmDirectory.getAbsolutePath() +"] cannot be created !"); } } this.checkoutSucceeded = this.scmManipulator.checkout(this.checkoutScmDirectory); if(this.checkoutSucceeded){ LOGGER.info("SCM repository initialization done."); } signal("Checkout " + this.checkoutScmDirectory, this.checkoutSucceeded); } } public void cleanChekoutScmDirectory(){ if(checkoutScmDirectory != null && checkoutScmDirectory.exists()){ LOGGER.info("Deleting old checkout SCM directory ..."); try { FileUtils.forceDelete(checkoutScmDirectory); } catch (IOException e) { LOGGER.throwing(FileUtils.class.getName(), "forceDelete", e); LOGGER.severe("Error while deleting ["+checkoutScmDirectory.getAbsolutePath()+"] : "+e.getMessage()); } this.checkoutSucceeded = false; } } public List<File> deleteHierarchy(ScmContext scmContext, String hierarchyPath){ if(scmManipulator == null || !scmManipulator.scmConfigurationSettledUp(scmContext, false)){ return null; } File rootHierarchyTranslatedInScm = new File(getCheckoutScmDirectoryAbsolutePath()+File.separator+hierarchyPath); List<File> filesToCommit = scmManipulator.deleteHierarchy(rootHierarchyTranslatedInScm); signal("Delete " + hierarchyPath, filesToCommit != null); return filesToCommit; } public Future<Void> queueChangeSet(final ScmContext scmContext, ChangeSet changeset, User user, String userMessage) { if(scmManipulator == null || !scmManipulator.scmConfigurationSettledUp(scmContext, false)){ LOGGER.info("Queue of changeset "+changeset.toString()+" aborted (scm manipulator not settled !)"); return null; } Commit commit = new Commit(changeset, user, userMessage, scmContext); LOGGER.info("Queuing commit "+commit.toString()+" to SCM ..."); 
commitsQueue.add(commit); return writer.submit(new Callable<Void>() { public Void call() throws Exception { processCommitsQueue(); return null; } }); } private void processCommitsQueue() { File scmRoot = new File(getCheckoutScmDirectoryAbsolutePath()); // Copying shared commitQueue in order to allow conccurrent modification List<Commit> currentCommitQueue = new ArrayList<Commit>(commitsQueue); List<Commit> checkedInCommits = new ArrayList<Commit>(); try { // Reading commit queue and commiting changeset for(Commit commit: currentCommitQueue){ String logMessage = "Processing commit : " + commit.toString(); LOGGER.info(logMessage); // Preparing files to add / delete List<File> synchronizedFiles = new ArrayList<File>(); for(Map.Entry<Path,byte[]> pathContent : commit.getChangeset().getPathContents().entrySet()){ Path pathRelativeToJenkinsRoot = pathContent.getKey(); byte[] content = pathContent.getValue(); File fileTranslatedInScm = new File(getCheckoutScmDirectoryAbsolutePath()+File.separator+pathRelativeToJenkinsRoot.getPath()); boolean fileAlreadySynchronized = fileTranslatedInScm.exists(); if(!fileAlreadySynchronized){ createScmContent(pathRelativeToJenkinsRoot, content, fileTranslatedInScm); synchronizedFiles.addAll(scmManipulator.addFile(scmRoot, pathRelativeToJenkinsRoot.getPath())); } else { if(!fileTranslatedInScm.isDirectory()){ if(writeScmContentOnlyIfItDiffers(content, fileTranslatedInScm)){ synchronizedFiles.add(fileTranslatedInScm); } } else { synchronizedFiles.add(fileTranslatedInScm); } } } for(Path path : commit.getChangeset().getPathsToDelete()){ List<File> deletedFiles = deleteHierarchy(commit.getScmContext(), path.getPath()); synchronizedFiles.addAll(deletedFiles); } if(synchronizedFiles.isEmpty()){ LOGGER.info("Empty changeset to commit (no changes found on files) => commit skipped !"); } else { // Commiting files... 
boolean result = scmManipulator.checkinFiles(scmRoot, commit.getMessage()); if(result){ LOGGER.info("Commit "+commit.toString()+" pushed to SCM !"); checkedInCommits.add(commit); } else { throw new LoggableException("Error while checking in file to scm repository", SCMManipulator.class, "checkinFiles"); } signal(logMessage, true); } } // As soon as a commit doesn't goes well, we should abort commit queue processing... }catch(LoggableException e){ LOGGER.throwing(e.getClazz().getName(), e.getMethodName(), e); LOGGER.severe("Error while processing commit queue : "+e.getMessage()); signal(e.getMessage(), false); } finally { // We should remove every checkedInCommits commitsQueue.removeAll(checkedInCommits); } } private boolean writeScmContentOnlyIfItDiffers(byte[] content, File fileTranslatedInScm) throws LoggableException { boolean scmContentUpdated = false; boolean contentDiffer = false; try { contentDiffer = !Checksums.fileAndByteArrayContentAreEqual(fileTranslatedInScm, content); } catch (IOException e) { throw new LoggableException("Error while checking content checksum", Checksums.class, "fileAndByteArrayContentAreEqual", e); } if(contentDiffer){ try { Files.write(content, fileTranslatedInScm); scmContentUpdated = true; } catch (IOException e) { throw new LoggableException("Error while copying content to scm directory", Files.class, "write", e); } } else { // Don't do anything } return scmContentUpdated; } private void createScmContent(Path pathRelativeToJenkinsRoot, byte[] content, File fileTranslatedInScm) throws LoggableException { Stack<File> directoriesToCreate = new Stack<File>(); File directory = fileTranslatedInScm.getParentFile(); // Eventually, creating non existing enclosing directories while(!directory.exists()){ directoriesToCreate.push(directory); directory = directory.getParentFile(); } while(!directoriesToCreate.empty()){ directory = directoriesToCreate.pop(); if(!directory.mkdir()){ throw new LoggableException("Error while creating directory 
"+directory.getAbsolutePath(), File.class, "mkdir"); } } try { // Copying content if pathRelativeToJenkinsRoot is a file, or creating the directory if it is a directory if(pathRelativeToJenkinsRoot.isDirectory()){ if(!fileTranslatedInScm.mkdir()){ throw new LoggableException("Error while creating directory "+fileTranslatedInScm.getAbsolutePath(), File.class, "mkdir"); } } else { Files.write(content, fileTranslatedInScm); } } catch (IOException e) { throw new LoggableException("Error while creating file in checkouted directory", Files.class, "write", e); } } public void synchronizeAllConfigs(ScmSyncStrategy[] availableStrategies){ List<File> filesToSync = new ArrayList<File>(); // Building synced files from strategies for(ScmSyncStrategy strategy : availableStrategies){ filesToSync.addAll(strategy.createInitializationSynchronizedFileset()); } ScmSyncConfigurationPlugin plugin = ScmSyncConfigurationPlugin.getInstance(); plugin.startThreadedTransaction(); try { for(File fileToSync : filesToSync){ String hudsonConfigPathRelativeToHudsonRoot = JenkinsFilesHelper.buildPathRelativeToHudsonRoot(fileToSync); plugin.getTransaction().defineCommitMessage(new WeightedMessage("Repository initialization", MessageWeight.IMPORTANT)); plugin.getTransaction().registerPath(hudsonConfigPathRelativeToHudsonRoot); } } finally { plugin.getTransaction().commit(); } } public boolean scmCheckoutDirectorySettledUp(ScmContext scmContext){ return scmManipulator != null && this.scmManipulator.scmConfigurationSettledUp(scmContext, false) && this.checkoutSucceeded; } public List<File> reloadAllFilesFromScm() throws IOException, ScmException { this.scmManipulator.update(new File(getCheckoutScmDirectoryAbsolutePath())); return syncDirectories(new File(getCheckoutScmDirectoryAbsolutePath() + File.separator), ""); } private List<File> syncDirectories(File from, String relative) throws IOException { List<File> l = new ArrayList<File>(); for(File f : from.listFiles()) { String newRelative = relative + 
File.separator + f.getName(); File jenkinsFile = new File(Hudson.getInstance().getRootDir() + newRelative); if (f.getName().equals(scmManipulator.getScmSpecificFilename())) { // nothing to do } else if (f.isDirectory()) { if (!jenkinsFile.exists()) { FileUtils.copyDirectory(f, jenkinsFile, new FileFilter() { public boolean accept(File f) { return !f.getName().equals(scmManipulator.getScmSpecificFilename()); } }); l.add(jenkinsFile); } else { l.addAll(syncDirectories(f, newRelative)); } } else { if (!jenkinsFile.exists() || !FileUtils.contentEquals(f, jenkinsFile)) { FileUtils.copyFile(f, jenkinsFile); l.add(jenkinsFile); } } } return l; } private void signal(String operation, boolean result) { if (result) { getScmSyncConfigurationStatusManager().signalSuccess(); } else { getScmSyncConfigurationStatusManager().signalFailed(operation); } } public static String getCheckoutScmDirectoryAbsolutePath(){ return Hudson.getInstance().getRootDir().getAbsolutePath()+WORKING_DIRECTORY_PATH+CHECKOUT_SCM_DIRECTORY; } }
package org.dasein.cloud.cloudsigma.network.firewall;

import org.apache.log4j.Logger;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.Tag;
import org.dasein.cloud.cloudsigma.CloudSigma;
import org.dasein.cloud.cloudsigma.CloudSigmaConfigurationException;
import org.dasein.cloud.cloudsigma.CloudSigmaMethod;
import org.dasein.cloud.cloudsigma.NoContextException;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.network.Direction;
import org.dasein.cloud.network.Firewall;
import org.dasein.cloud.network.FirewallCreateOptions;
import org.dasein.cloud.network.FirewallRule;
import org.dasein.cloud.network.AbstractFirewallSupport;
import org.dasein.cloud.network.Permission;
import org.dasein.cloud.network.Protocol;
import org.dasein.cloud.network.RuleTarget;
import org.dasein.cloud.network.RuleTargetType;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;

/**
 * Dasein Cloud firewall support backed by CloudSigma firewall policies
 * ({@code /fwpolicies/} REST endpoints). Rules are stored as JSON objects in a
 * policy's "rules" array; this class translates between that representation and
 * Dasein {@link FirewallRule} objects.
 */
public class ServerFirewallSupport extends AbstractFirewallSupport {
    static private final Logger logger = CloudSigma.getLogger(ServerFirewallSupport.class);

    private CloudSigma provider;

    public ServerFirewallSupport(@Nonnull CloudSigma provider) {
        super(provider);
        this.provider = provider;
    }

    /**
     * Adds a rule to an existing firewall policy by fetching the policy JSON,
     * appending a new rule object and PUTting the whole policy back.
     * Only CIDR source/destination targets are supported.
     *
     * @throws OperationNotSupportedException for non-CIDR rule targets
     * @throws CloudException when the PUT returns no body
     */
    @Nonnull
    @Override
    public String authorize(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull RuleTarget sourceEndpoint, @Nonnull Protocol protocol, @Nonnull RuleTarget destinationEndpoint, int beginPort, int endPort, @Nonnegative int precedence) throws CloudException, InternalException {
        if (sourceEndpoint.getRuleTargetType() != RuleTargetType.CIDR) {
            throw new OperationNotSupportedException("Target type "+sourceEndpoint.getRuleTargetType()+" for sourceEndpoint not supported in CloudSigma");
        }
        if (destinationEndpoint.getRuleTargetType() != RuleTargetType.CIDR) {
            throw new OperationNotSupportedException("Target type "+destinationEndpoint.getRuleTargetType()+" for destinationEndpoint not supported in CloudSigma");
        }

        CloudSigmaMethod method = new CloudSigmaMethod(provider);
        try{
            JSONObject fw = new JSONObject(method.getString(toFirewallURL(firewallId, "")));
            JSONArray rules = fw.getJSONArray("rules");

            JSONObject rule = new JSONObject();
            rule.put("action", (permission == Permission.ALLOW ? "accept" : "drop"));
            rule.put("direction", (direction == Direction.INGRESS ? "in" : "out"));
            rule.put("dst_ip", destinationEndpoint.getCidr());
            // Port range is encoded as "begin:end"; a single port as just "begin".
            rule.put("dst_port", String.valueOf(beginPort)+((endPort >= 0 && endPort!=beginPort) ? ":"+String.valueOf(endPort) : ""));
            rule.put("ip_proto", (protocol == Protocol.TCP ? "tcp" : "udp"));
            rule.put("src_ip", sourceEndpoint.getCidr() );
            rules.put(rule);

            String firewallObj = method.putString(toFirewallURL(firewallId, ""), fw.toString());
            if (firewallObj != null) {
                FirewallRule newRule = FirewallRule.getInstance(null, firewallId, sourceEndpoint, direction, protocol, permission, destinationEndpoint, beginPort, endPort);
                return newRule.getProviderRuleId();
            }
        }
        catch (JSONException e) {
            throw new InternalException(e);
        }
        throw new CloudException("Firewall rule created but not found in response");
    }

    /**
     * Creates a new (non-VLAN) firewall policy by POSTing its name to
     * {@code /fwpolicies/}.
     *
     * @throws OperationNotSupportedException when a VLAN id is supplied
     */
    @Nonnull
    @Override
    public String create(@Nonnull FirewallCreateOptions options) throws InternalException, CloudException {
        if (options.getProviderVlanId() == null) {
            CloudSigmaMethod method = new CloudSigmaMethod(provider);

            try {
                JSONObject body = new JSONObject(), fwName = new JSONObject();
                JSONArray objects = new JSONArray();
                fwName.put("name", options.getName());
                objects.put(fwName);
                body.put("objects", objects);

                JSONObject fwObj = new JSONObject(method.postString("/fwpolicies/", body.toString()));
                Firewall firewall = null;
                if (fwObj != null) {
                    JSONArray arr = fwObj.getJSONArray("objects");
                    JSONObject fw = arr.getJSONObject(0);
                    firewall = toFirewall(fw);
                }
                if (firewall == null) {
                    throw new CloudException("Firewall created but no information was provided");
                }
                return firewall.getProviderFirewallId();
            }
            catch (JSONException e) {
                throw new InternalException(e);
            }
        }
        else {
            throw new OperationNotSupportedException("Vlan firewall creation not supported");
        }
    }

    /** Firewall deletion is not supported by the CloudSigma API. */
    @Override
    public void delete(@Nonnull String s) throws InternalException, CloudException {
        throw new OperationNotSupportedException("Deleting firewalls is not supported in CloudSigma api");
    }

    /**
     * Fetches a single firewall policy by id.
     *
     * @return the firewall, or null when the API returns no body
     * @throws InternalException when firewallId is empty
     */
    @Nullable
    @Override
    public Firewall getFirewall(@Nonnull String firewallId) throws InternalException, CloudException {
        if (firewallId.length() > 0) {
            CloudSigmaMethod method = new CloudSigmaMethod(provider);

            try {
                String fwObj = method.getString(toFirewallURL(firewallId, ""));
                if (fwObj != null) {
                    return toFirewall(new JSONObject(fwObj));
                }
                return null;
            }
            catch (JSONException e) {
                throw new InternalException(e);
            }
        }
        else {
            throw new InternalException("Firewall id is null/empty!");
        }
    }

    @Nonnull
    @Override
    public String getProviderTermForFirewall(@Nonnull Locale locale) {
        return "firewall policy";
    }

    /**
     * Lists the rules of a firewall policy by parsing its "rules" JSON array.
     *
     * @throws InternalException when firewallId is empty
     */
    @Nonnull
    @Override
    public Collection<FirewallRule> getRules(@Nonnull String firewallId) throws InternalException, CloudException {
        ArrayList<FirewallRule> list = new ArrayList<FirewallRule>();

        if (firewallId.length() > 0) {
            CloudSigmaMethod method = new CloudSigmaMethod(provider);

            try {
                String fwObj = method.getString(toFirewallURL(firewallId, ""));
                if (fwObj != null) {
                    JSONObject firewall = new JSONObject(fwObj);
                    JSONArray matches = firewall.getJSONArray("rules");
                    for (int i= 0; i<matches.length(); i++) {
                        FirewallRule rule = toFirewallRule(matches.getJSONObject(i), firewallId);
                        if (rule != null) {
                            list.add(rule);
                        }
                    }
                }
                return list;
            }
            catch (JSONException e) {
                throw new InternalException(e);
            }
        }
        else {
            throw new InternalException("Firewall id is null/empty!");
        }
    }

    @Nonnull
    @Override
    public Requirement identifyPrecedenceRequirement(boolean inVlan) throws InternalException, CloudException {
        return Requirement.NONE;
    }

    @Override
    public boolean isSubscribed() throws CloudException, InternalException {
        return true;
    }

    @Override
    public boolean isZeroPrecedenceHighest() throws InternalException, CloudException {
        return true;
    }

    /**
     * Lists all firewall policies, following the API's "meta.next" pagination
     * links until no further page is advertised.
     *
     * NOTE(review): if a response has no "meta" object, moreData is never set
     * to false and the loop repeats with a growing target — confirm the API
     * always returns "meta" on list responses.
     */
    @Nonnull
    @Override
    public Collection<Firewall> list() throws InternalException, CloudException {
        ArrayList<Firewall> list = new ArrayList<Firewall>();
        CloudSigmaMethod method = new CloudSigmaMethod(provider);

        boolean moreData = true;
        String baseTarget = "/fwpolicies/detail/";
        String target = "";
        while(moreData) {
            target = baseTarget+target;
            try {
                JSONObject json = method.list(target);
                if (json == null) {
                    throw new CloudException("No firewall endpoint was found");
                }
                JSONArray objects = json.getJSONArray("objects");
                for (int i = 0; i < objects.length(); i++) {
                    JSONObject jObj = objects.getJSONObject(i);
                    Firewall fw = toFirewall(jObj);
                    if (fw != null) {
                        list.add(fw);
                    }
                }

                //dmayne 20130314: check if there are more pages
                if (json.has("meta")) {
                    JSONObject meta = json.getJSONObject("meta");

                    if (meta.has("next") && !(meta.isNull("next")) && !meta.getString("next").equals("")) {
                        // Keep only the query string of the next link; prepended to baseTarget above.
                        target = meta.getString("next");
                        target = target.substring(target.indexOf("?"));
                        moreData = true;
                    }
                    else {
                        moreData = false;
                    }
                }
            }
            catch (JSONException e) {
                throw new InternalException(e);
            }
        }
        return list;
    }

    /**
     * Lists the status of all firewall policies (uuid only), using the same
     * "meta.next" pagination scheme as {@link #list()}.
     */
    @Nonnull
    @Override
    public Iterable<ResourceStatus> listFirewallStatus() throws InternalException, CloudException {
        ArrayList<ResourceStatus> list = new ArrayList<ResourceStatus>();
        CloudSigmaMethod method = new CloudSigmaMethod(provider);

        boolean moreData = true;
        String baseTarget = "/fwpolicies/";
        String target = "?fields=uuid";
        while(moreData) {
            target = baseTarget+target;
            try {
                JSONObject json = method.list(target);
                if (json == null) {
                    throw new CloudException("No firewall endpoint was found");
                }
                JSONArray objects = json.getJSONArray("objects");
                for (int i = 0; i < objects.length(); i++) {
                    JSONObject jObj = objects.getJSONObject(i);
                    ResourceStatus fw = toFirewallStatus(jObj);
                    if (fw != null) {
                        list.add(fw);
                    }
                }

                //dmayne 20130314: check if there are more pages
                if (json.has("meta")) {
                    JSONObject meta = json.getJSONObject("meta");

                    if (meta.has("next") && !(meta.isNull("next")) && !meta.getString("next").equals("")) {
                        target = meta.getString("next");
                        target = target.substring(target.indexOf("?"));
                        moreData = true;
                    }
                    else {
                        moreData = false;
                    }
                }
            }
            catch (JSONException e) {
                throw new InternalException(e);
            }
        }
        return list;
    }

    @Nonnull
    @Override
    public Iterable<RuleTargetType> listSupportedDestinationTypes(boolean inVlan) throws InternalException, CloudException {
        if (!inVlan) {
            Collection<RuleTargetType> destTypes = new ArrayList<RuleTargetType>();
            destTypes.add(RuleTargetType.CIDR);
            return destTypes;
        }
        return Collections.emptyList();
    }

    @Nonnull
    @Override
    public Iterable<Direction> listSupportedDirections(boolean inVlan) throws InternalException, CloudException {
        if (!inVlan) {
            ArrayList<Direction> list = new ArrayList<Direction>();

            list.add(Direction.EGRESS);
            list.add(Direction.INGRESS);
            return list;
        }
        return Collections.emptyList();
    }

    @Nonnull
    @Override
    public Iterable<Permission> listSupportedPermissions(boolean inVlan) throws InternalException, CloudException {
        if (!inVlan) {
            ArrayList<Permission> list = new ArrayList<Permission>();

            list.add(Permission.ALLOW);
            list.add(Permission.DENY);
            return list;
        }
        return Collections.emptyList();
    }

    @Nonnull
    @Override
    public Iterable<RuleTargetType> listSupportedSourceTypes(boolean inVlan) throws InternalException, CloudException {
        if (!inVlan) {
            Collection<RuleTargetType> sourceTypes = new ArrayList<RuleTargetType>();
            sourceTypes.add(RuleTargetType.CIDR);
            return sourceTypes;
        }
        return Collections.emptyList();
    }

    @Nonnull
    @Override
    public String[] mapServiceAction(@Nonnull ServiceAction action) {
        return new String[0];  //To change body of overridden methods use File | Settings | File Templates.
    }

    @Override
    public void removeTags(@Nonnull String volumeId, @Nonnull Tag... tags) throws CloudException, InternalException {
        //To change body of overridden methods use File | Settings | File Templates.
    }

    @Override
    public void removeTags(@Nonnull String[] vmIds, @Nonnull Tag... tags) throws CloudException, InternalException {
        //To change body of overridden methods use File | Settings | File Templates.
    }

    /**
     * Revokes a rule by id: scans every firewall's rules to find the one whose
     * provider rule id matches, then delegates to the private revoke.
     */
    @Override
    public void revoke(@Nonnull String providerFirewallRuleId) throws InternalException, CloudException {
        FirewallRule rule = null;

        for( Firewall f : list() ) {
            String fwId = f.getProviderFirewallId();

            if( fwId != null ) {
                for( FirewallRule r : getRules(fwId) ) {
                    if( providerFirewallRuleId.equals(r.getProviderRuleId()) ) {
                        rule = r;
                        break;
                    }
                }
            }
        }
        if( rule == null ) {
            throw new CloudException("Unable to parse rule ID: " + providerFirewallRuleId);
        }
        revoke(providerFirewallRuleId, rule.getFirewallId());
    }

    @Override
    public void revoke(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
        revoke(firewallId, direction, Permission.ALLOW, source, protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort);
    }

    @Override
    public void revoke(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
        revoke(firewallId, direction, permission, source, protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort);
    }

    @Override
    public void revoke(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull String source, @Nonnull Protocol protocol, @Nonnull RuleTarget target, int beginPort, int endPort) throws CloudException, InternalException {
        // Reconstruct the deterministic rule id from its components, then revoke it.
        String tmpRuleId = FirewallRule.getRuleId(firewallId, RuleTarget.getCIDR(source), direction, protocol, permission, target, beginPort, endPort);
        revoke(tmpRuleId, firewallId);
    }

    /**
     * Removes the rule with the given id from the policy's "rules" array and
     * PUTs the filtered policy back.
     */
    private void revoke(@Nonnull String ruleId, @Nonnull String firewallId) throws CloudException, InternalException {
        CloudSigmaMethod method = new CloudSigmaMethod(provider);
        JSONArray newArray = new JSONArray();
        try{
            JSONObject fw = new JSONObject(method.getString(toFirewallURL(firewallId, "")));
            JSONArray rules = fw.getJSONArray("rules");
            for (int i = 0; i<rules.length(); i++) {
                JSONObject rule = rules.getJSONObject(i);
                FirewallRule r = toFirewallRule(rule, firewallId);
                // Keep every rule except the one being revoked.
                if (!r.getProviderRuleId().equalsIgnoreCase(ruleId)) {
                    newArray.put(rule);
                }
            }
            fw.put("rules", newArray);
            String jsonBody = fw.toString();

            if (method.putString(toFirewallURL(firewallId, ""), jsonBody) == null) {
                throw new CloudException("Unable to locate firewall endpoint in CloudSigma");
            }
        }
        catch (JSONException e) {
            throw new InternalException(e);
        }
    }

    @Override
    public boolean supportsRules(@Nonnull Direction direction, @Nonnull Permission permission, boolean inVlan) throws CloudException, InternalException {
        if (!inVlan) {
            return true;
        }
        return false;
    }

    @Override
    public boolean supportsFirewallCreation(boolean inVlan) throws CloudException, InternalException {
        if (!inVlan) {
            return true;
        }
        return false;
    }

    @Override
    public boolean supportsFirewallSources() throws CloudException, InternalException {
        return false;
    }

    @Override
    public void updateTags(@Nonnull String volumeId, @Nonnull Tag... tags) throws CloudException, InternalException {
        //To change body of overridden methods use File | Settings | File Templates.
    }

    @Override
    public void updateTags(@Nonnull String[] vmIds, @Nonnull Tag... tags) throws CloudException, InternalException {
        //To change body of overridden methods use File | Settings | File Templates.
    }

    /**
     * Converts a firewall policy JSON object into a Dasein {@link Firewall}.
     * The policy name (when present) is used for both name and description.
     *
     * @return the firewall, or null when fw is null
     */
    private Firewall toFirewall(JSONObject fw) throws CloudException, InternalException{
        if (fw == null) {
            return null;
        }
        ProviderContext ctx = provider.getContext();

        if (ctx == null) {
            throw new NoContextException();
        }
        String regionId = ctx.getRegionId();

        if (regionId == null) {
            throw new CloudSigmaConfigurationException("No region was specified for this request");
        }
        Firewall firewall = new Firewall();
        try {
            String fwId = fw.getString("uuid");
            firewall.setProviderFirewallId(fwId);

            if (fw.has("name") && !fw.isNull("name")) {
                String name = fw.getString("name");

                if (name != null) {
                    firewall.setName(name);
                    firewall.setDescription(name);
                }
            }
            firewall.setActive(true);
            firewall.setAvailable(true);
            firewall.setRegionId(regionId);
        }
        catch (JSONException e) {
            throw new InternalException(e);
        }
        return firewall;
    }

    /**
     * Converts a firewall policy JSON object into a {@link ResourceStatus}
     * keyed by uuid; policies are always reported as available (true).
     */
    private ResourceStatus toFirewallStatus(JSONObject fw) throws CloudException, InternalException{
        if (fw == null) {
            return null;
        }
        ProviderContext ctx = provider.getContext();

        if (ctx == null) {
            throw new NoContextException();
        }
        String regionId = ctx.getRegionId();

        if (regionId == null) {
            throw new CloudSigmaConfigurationException("No region was specified for this request");
        }
        String fwId;
        try {
            fwId = fw.getString("uuid");
        }
        catch (JSONException e) {
            throw new InternalException(e);
        }
        return new ResourceStatus(fwId, true);
    }

    /**
     * Converts a rule JSON object into a Dasein {@link FirewallRule}.
     * Missing src/dst IPs fall back to a global rule target for the firewall;
     * "dst_port" may be a single port or a "begin:end" range.
     *
     * @return the rule, or null when fwRule is null
     */
    private FirewallRule toFirewallRule(JSONObject fwRule, String fwID) throws CloudException, InternalException{
        if (fwRule == null) {
            return null;
        }
        ProviderContext ctx = provider.getContext();

        if (ctx == null) {
            throw new NoContextException();
        }
        String regionId = ctx.getRegionId();

        if (regionId == null) {
            throw new CloudSigmaConfigurationException("No region was specified for this request");
        }

        String providerFirewallId = fwID;
        RuleTarget sourceEndpoint = null;
        Direction direction = null;
        Protocol protocol = null;
        Permission permission = null;
        RuleTarget destEndpoint = null;
        int startPort = -1;
        int endPort = -1;

        try {
            if (fwRule.has("src_ip") && !fwRule.isNull("src_ip")) {
                String sourceIP = fwRule.getString("src_ip");
                sourceEndpoint = RuleTarget.getCIDR(sourceIP);
            }
            if (fwRule.has("direction")) {
                String dir = fwRule.getString("direction");
                direction = (dir.equalsIgnoreCase("in") ? Direction.INGRESS : Direction.EGRESS);
            }
            if (fwRule.has("ip_proto") && !fwRule.isNull("ip_proto")) {
                String proto = fwRule.getString("ip_proto");
                if (proto.equalsIgnoreCase("tcp")) {
                    protocol = Protocol.TCP;
                }
                else if (proto.equalsIgnoreCase("udp")) {
                    protocol = Protocol.UDP;
                }
            }
            if (fwRule.has("action")) {
                String action = fwRule.getString("action");
                if (action.equalsIgnoreCase("accept")) {
                    permission = Permission.ALLOW;
                }
                else if (action.equalsIgnoreCase("drop")) {
                    permission = Permission.DENY;
                }
            }
            if (fwRule.has("dst_ip") && !fwRule.isNull("dst_ip")) {
                String destIP = fwRule.getString("dst_ip");
                destEndpoint = RuleTarget.getCIDR(destIP);
            }
            if (fwRule.has("dst_port") && !fwRule.isNull("dst_port")) {
                String destPort = fwRule.getString("dst_port");
                if (destPort.indexOf(":") > -1) {
                    startPort = Integer.parseInt(destPort.substring(0, destPort.indexOf(":")));
                    endPort = Integer.parseInt(destPort.substring(destPort.indexOf(":")+1, destPort.length()));
                }
                else {
                    startPort = Integer.parseInt(destPort);
                    endPort = startPort;
                }
            }
            if(sourceEndpoint == null) {
                sourceEndpoint = RuleTarget.getGlobal(providerFirewallId);
            }
            if(destEndpoint == null) {
                destEndpoint = RuleTarget.getGlobal(providerFirewallId);
            }
        }
        catch (JSONException e) {
            throw new InternalException(e);
        }

        FirewallRule newRule = FirewallRule.getInstance(null, providerFirewallId, sourceEndpoint, direction, protocol, permission, destEndpoint, startPort, endPort);
        return newRule;
    }

    /** Builds a URL-encoded policy endpoint path: /fwpolicies/{id}/{action}. */
    private @Nonnull String toFirewallURL(@Nonnull String firewallId, @Nonnull String action) throws InternalException {
        try {
            return ("/fwpolicies/" + URLEncoder.encode(firewallId, "utf-8") + "/" + action);
        }
        catch (UnsupportedEncodingException e) {
            logger.error("UTF-8 not supported: " + e.getMessage());
            throw new InternalException(e);
        }
    }
}
package org.spongepowered.api.event.entity.living;

import org.spongepowered.api.entity.living.Living;
import org.spongepowered.api.event.entity.EntityInteractBlockEvent;
import org.spongepowered.api.world.Location;

/**
 * Called when a {@link Living} interacts with a {@link Location}.
 */
public interface LivingInteractBlockEvent extends LivingInteractEvent, EntityInteractBlockEvent {

    /**
     * Checks if this is a flowerpot.
     *
     * <p>NOTE(review): presumably this distinguishes interaction with a
     * flower pot block from other block interactions — confirm against
     * the implementing event classes.</p>
     *
     * @return Whether this is a flowerpot
     */
    boolean isFlowerPot();
}
// Getdown - application installer, patcher and launcher // This program is free software; you can redistribute it and/or modify it // any later version. // This program is distributed in the hope that it will be useful, but WITHOUT // more details. // this program; if not, write to the: Free Software Foundation, Inc., // 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA package com.threerings.getdown.launcher; import java.awt.Container; import java.awt.Image; import java.net.URL; import java.security.GeneralSecurityException; import java.security.Signature; import java.security.cert.Certificate; import javax.swing.JApplet; import javax.swing.JPanel; import java.io.IOException; import java.io.FileOutputStream; import java.io.File; import java.io.PrintStream; import netscape.javascript.JSObject; import org.apache.commons.codec.binary.Base64; import com.samskivert.util.RunAnywhere; import com.samskivert.util.StringUtil; import com.threerings.getdown.Log; /** * An applet that can be used to launch a Getdown application (when signed and * given privileges). */ public class GetdownApplet extends JApplet { @Override // documentation inherited public void init () { // First off, verify that we are not being hijacked to execute // malicious code in the name of the signer. 
String appbase = getParameter("appbase"); String appname = getParameter("appname"); String imgpath = getParameter("bgimage"); if (appbase == null) { appbase = ""; } if (appname == null) { appname = ""; } if (imgpath == null) { imgpath = ""; } String params = appbase + appname + imgpath; String signature = getParameter("signature"); if (signature == null) { signature = ""; } Object[] signers = GetdownApplet.class.getSigners(); if (signers.length == 0) { _safe = true; } for (Object signer : signers) { if (!_safe && signer instanceof Certificate) { Certificate cert = (Certificate)signer; try { Signature sig = Signature.getInstance("SHA1withRSA"); sig.initVerify(cert); sig.update(params.getBytes()); if (sig.verify(Base64.decodeBase64( signature.getBytes()))) { _safe = true; } } catch (GeneralSecurityException gse) { // ignore the error - the default is to not launch. } } } if (!_safe) { Log.warning("Signed getdown invoked on unsigned application; " + "aborting installation."); return; } // Pass through properties parameter. 
String properties = getParameter("properties"); if (properties != null) { String[] proparray = properties.split(" "); for (String property : proparray) { String key = property.substring(property.indexOf("-D") + 2, property.indexOf("=")); String value = property.substring(property.indexOf("=") + 1); System.setProperty(key, value); } } // when run from an applet, we install String root; if (RunAnywhere.isWindows()) { root = "Application Data"; } else if (RunAnywhere.isMacOS()) { root = "Library" + File.separator + "Application Support"; } else /* isLinux() or something wacky */ { root = ".getdown"; } String appdir = System.getProperty("user.home") + File.separator + root + File.separator + appname; // if our application directory does not exist, auto-create it File appDir = new File(appdir); if (!appDir.exists() || !appDir.isDirectory()) { if (!appDir.mkdirs()) { Log.warning("Unable to create app_dir '" + appdir + "'."); // TODO: report error return; } } // if an installer.txt file is desired, create that String inststr = getParameter("installer"); if (!StringUtil.isBlank(inststr)) { File infile = new File(appDir, "installer.txt"); if (!infile.exists()) { writeToFile(infile, inststr); } } // if our getdown.txt file does not exist, auto-create it File gdfile = new File(appDir, "getdown.txt"); if (!gdfile.exists()) { if (StringUtil.isBlank(appbase)) { Log.warning("Missing 'appbase' cannot auto-create " + "application directory."); // TODO: report return; } if (!writeToFile(gdfile, "appbase = " + appbase)) { // TODO: report the error return; } } // if a background image was specified, grabbit try { if (!StringUtil.isBlank(imgpath)) { _bgimage = getImage(new URL(getDocumentBase(), imgpath)); } } catch (Exception e) { Log.info("Failed to load background image [path=" + imgpath + "]."); Log.logStackTrace(e); } // record a few things for posterity Log.info(" Log.info("-- OS Name: " + System.getProperty("os.name")); Log.info("-- OS Arch: " + System.getProperty("os.arch")); 
Log.info("-- OS Vers: " + System.getProperty("os.version")); Log.info("-- Java Vers: " + System.getProperty("java.version")); Log.info("-- Java Home: " + System.getProperty("java.home")); Log.info("-- User Name: " + System.getProperty("user.name")); Log.info("-- User Home: " + System.getProperty("user.home")); Log.info("-- Cur dir: " + System.getProperty("user.dir")); Log.info(" try { _getdown = new Getdown(appDir, null) { protected Container createContainer () { getContentPane().removeAll(); return getContentPane(); } protected void showContainer () { ((JPanel)getContentPane()).revalidate(); } protected void disposeContainer () { // nothing to do as we're in an applet } protected boolean invokeDirect () { return "true".equalsIgnoreCase(getParameter("direct")); } protected JApplet getApplet () { return GetdownApplet.this; } protected Image getBackgroundImage () { return _bgimage == null ? super.getBackgroundImage() : _bgimage; } protected void exit (int exitCode) { // don't exit as we're in an applet } @Override // documentation inherited protected void setStatus (final String message, final int percent, final long remaining, boolean createUI) { super.setStatus(message, percent, remaining, createUI); JSObject.getWindow(GetdownApplet.this).call("getdownStatus", new Object[] {message, percent, remaining}); } }; // set up our user interface immediately _getdown.preInit(); } catch (Exception e) { Log.logStackTrace(e); } } @Override // documentation inherited public void start () { if (!_safe) { return; } try { _getdown.start(); } catch (Exception e) { Log.logStackTrace(e); } } @Override // documentation inherited public void stop () { // TODO } /** * Creates the specified file and writes the supplied contents to it. 
*/ protected boolean writeToFile (File tofile, String contents) { try { PrintStream out = new PrintStream(new FileOutputStream(tofile)); out.println(contents); out.close(); return true; } catch (IOException ioe) { Log.warning("Failed to create '" + tofile + "'."); Log.logStackTrace(ioe); return false; } } protected Getdown _getdown; protected Image _bgimage; /** * Getdown will refuse to initialize if the jar is signed but the * parameters are not validated to prevent malicious code from being run. */ protected boolean _safe = false; }
package de.longor.talecraft.commands;

import java.util.Collections;
import java.util.List;

import de.longor.talecraft.TaleCraft;
import de.longor.talecraft.network.StringNBTCommandPacket;
import de.longor.talecraft.util.CommandArgumentParser;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.command.CommandException;
import net.minecraft.command.EntitySelector;
import net.minecraft.command.ICommandSender;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.text.TextFormatting;

/**
 * The {@code /tc_highlight} command: visually highlights either a set of
 * entities (via a glowing potion effect) or a single block (via a packet
 * sent to all clients). Highlight clearing is not implemented yet.
 */
public class HighlightCommand extends TCCommandBase
{
    @Override
    public String getCommandName()
    {
        return "tc_highlight";
    }

    @Override
    public String getCommandUsage(ICommandSender sender)
    {
        return "< ? >";
    }

    /**
     * Dispatches on the first argument: "clear", "entity" or "block".
     * Unknown actions are silently ignored.
     */
    @Override
    public void execute(MinecraftServer server, ICommandSender sender, String[] args) throws CommandException
    {
        CommandArgumentParser arguments = new CommandArgumentParser(args);
        arguments.commandSenderPosition = sender.getPositionVector();

        String action = arguments.consume_string("Couldn't parse highlight action!");

        switch (action)
        {
        case "clear":
            // TODO: clear highlights!
            sender.addChatMessage(new TextComponentString(TextFormatting.RED+"ERROR: highlight clearing not yet implemented."));
            return;
        case "entity":
            highlightEntities(sender, arguments);
            return;
        case "block":
            highlightBlock(arguments);
            return;
        default:
            // unknown action: fall through and do nothing (matches legacy behavior)
        }
    }

    /**
     * Applies a glowing potion effect to every living entity matched by the
     * selector. Duration is parsed first, then the selector (argument order
     * matters for error reporting).
     */
    private void highlightEntities(ICommandSender sender, CommandArgumentParser arguments) throws CommandException
    {
        double duration = arguments.consume_double("Couldn't parse duration!", 0.0000000001d, 10d);
        String selector = arguments.consume_string("Couldn't parse entity selector!");

        // NOTE(review): EntityPlayerSP is a client-only class used on the
        // server path here, and "minecraft:glow" may not resolve to a potion
        // ("glowing" is the usual id) — confirm both against the game version.
        List<Entity> entities = EntitySelector.matchEntities(sender, selector, EntityPlayerSP.class);
        Potion potion = Potion.getPotionFromResourceLocation("minecraft:glow");

        // NOTE(review): duration is truncated to an int tick count; values
        // below 1 yield a zero-length effect — TODO confirm intended units.
        PotionEffect effect = new PotionEffect(potion, (int) duration, 1);

        for (Entity matched : entities)
        {
            if (matched instanceof EntityLiving)
            {
                ((EntityLiving) matched).addPotionEffect(effect);
            }
        }
    }

    /**
     * Broadcasts a "highlight this block" render command to all clients.
     */
    private void highlightBlock(CommandArgumentParser arguments) throws CommandException
    {
        double duration = arguments.consume_double("Couldn't parse duration!", 0.0000000001d, 10d);
        BlockPos blockPos = arguments.consume_blockpos("Couldn't parse block position!");

        NBTTagCompound pktdata = new NBTTagCompound();
        pktdata.setString("type", "highlight.block");
        pktdata.setInteger("pos.x", blockPos.getX());
        pktdata.setInteger("pos.y", blockPos.getY());
        pktdata.setInteger("pos.z", blockPos.getZ());
        pktdata.setDouble("duration", duration);

        TaleCraft.network.sendToAll(new StringNBTCommandPacket("client.render.renderable.push", pktdata));
    }

    /** Tab-completes the action word; nothing past the first argument. */
    @Override
    public List<String> getTabCompletionOptions(MinecraftServer server, ICommandSender sender, String[] args, BlockPos pos)
    {
        return (args.length <= 1)
                ? getListOfStringsMatchingLastWord(args, new String[]{"entity","block","clear"})
                : Collections.emptyList();
    }
}
package org.apache.commons.net.ftp;

import java.io.BufferedReader;
import java.io.IOException;

/**
 * FTPFileEntryParser defines the interface for parsing a single FTP file
 * listing and converting that information into an
 * <a href="org.apache.commons.net.ftp.FTPFile.html"> FTPFile </a> instance.
 * Sometimes you will want to parse unusual listing formats, in which
 * case you would create your own implementation of FTPFileEntryParser and
 * if necessary, subclass FTPFile.
 * <p>
 * Here is an example showing how to use one of the classes that
 * implement this interface. In the following example <code>parser</code>
 * is an object (in the package <code>org.apache.commons.net.ftp.parser</code>)
 * implementing this interface.
 *
 * <pre>
 *    FTPClient f = new FTPClient();
 *    f.connect(server);
 *    f.login(username, password);
 *    FTPFileList list = createFTPFileList(directory, parser);
 *    FTPFileIterator iter = list.iterator();
 *
 *    while (iter.hasNext()) {
 *       FTPFile[] files = iter.getNext(25);  // "page size" you want
 *       // do whatever you want with these files, display them, etc.
 *       // expensive FTPFile objects not created until needed.
 *    }
 * </pre>
 *
 * @author <a href="mailto:scohen@apache.org">Steve Cohen</a>
 * @version $Id: FTPFileEntryParser.java,v 1.5 2003/12/30 04:04:11 scohen Exp $
 * @see org.apache.commons.net.ftp.FTPFile
 * @see org.apache.commons.net.ftp.FTPClient#createFileList
 */
public interface FTPFileEntryParser
{
    /**
     * Parses a line of an FTP server file listing and converts it into a usable
     * format in the form of an <code>FTPFile</code> instance.  If the
     * file listing line doesn't describe a file, <code>null</code> should be
     * returned, otherwise a <code>FTPFile</code> instance representing the
     * files in the directory is returned.
     * <p>
     * @param listEntry A line of text from the file listing
     * @return An FTPFile instance corresponding to the supplied entry
     */
    FTPFile parseFTPEntry(String listEntry);

    /**
     * Reads the next entry using the supplied BufferedReader object up to
     * whatever delimits one entry from the next.  Implementors must define
     * this for the particular ftp system being parsed.  In many but not all
     * cases, this can be defined simply by calling BufferedReader.readLine().
     *
     * @param reader The BufferedReader object from which entries are to be
     * read.
     *
     * @return A string representing the next ftp entry or null if none found.
     * @exception IOException thrown on any IO Error reading from the reader.
     */
    String readNextEntry(BufferedReader reader) throws IOException;
}
package aeronicamc.mods.mxtune.blocks;

import aeronicamc.mods.mxtune.init.ModSoundEvents;
import aeronicamc.mods.mxtune.managers.PlayIdSupplier;
import aeronicamc.mods.mxtune.managers.PlayManager;
import aeronicamc.mods.mxtune.util.IInstrument;
import aeronicamc.mods.mxtune.util.IWrenchAble;
import aeronicamc.mods.mxtune.util.SheetMusicHelper;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.block.material.MaterialColor;
import net.minecraft.client.util.ITooltipFlag;
import net.minecraft.entity.LivingEntity;
import net.minecraft.entity.item.ItemEntity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.player.ServerPlayerEntity;
import net.minecraft.inventory.ItemStackHelper;
import net.minecraft.inventory.container.INamedContainerProvider;
import net.minecraft.item.BlockItemUseContext;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.state.BooleanProperty;
import net.minecraft.state.StateContainer;
import net.minecraft.state.properties.BlockStateProperties;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.*;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.BlockRayTraceResult;
import net.minecraft.util.math.shapes.ISelectionContext;
import net.minecraft.util.math.shapes.VoxelShape;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.StringTextComponent;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.util.text.TranslationTextComponent;
import net.minecraft.world.IBlockReader;
import net.minecraft.world.IWorldReader;
import net.minecraft.world.World;
import net.minecraft.world.server.ServerWorld;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import net.minecraftforge.common.util.FakePlayerFactory;
import net.minecraftforge.fml.network.NetworkHooks;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import javax.annotation.Nullable;
import java.util.List;
import java.util.Optional;
import java.util.Random;

import static net.minecraft.state.properties.BlockStateProperties.HORIZONTAL_FACING;
import static net.minecraftforge.common.util.Constants.BlockFlags;
import static net.minecraftforge.common.util.Constants.NBT;

/**
 * A half-height "music player" block. It glows while PLAYING, can be started
 * either by right-click or by a rear redstone input, and emits a one-tick
 * redstone pulse (POWERED) out of its configured left/right sides when a
 * play session ends. A wrench lets the owner rotate or pick up the block.
 */
@SuppressWarnings("deprecation")
public class MusicBlock extends Block implements IMusicPlayer, IWrenchAble
{
    // True while music is playing; drives the light level and the tick loop.
    public static final BooleanProperty PLAYING = BooleanProperty.create("playing");
    // True only for the one-pulse redstone output after playback ends.
    public static final BooleanProperty POWERED = BlockStateProperties.POWERED;
    // Bottom-slab shaped collision/visual box (half-height).
    private static final VoxelShape BOTTOM_AABB = Block.box(0.0D, 0.0D, 0.0D, 16.0D, 8.0D, 16.0D);
    private static final Logger LOGGER = LogManager.getLogger(MusicBlock.class);
    private static final Random rand = new Random();

    public MusicBlock()
    {
        // lightLevel: the block glows (14) while PLAYING.
        super(Properties.of(Material.NETHER_WOOD, MaterialColor.COLOR_BROWN)
                      .sound(SoundType.WOOD)
                      .strength(2.0F)
                      .lightLevel(state -> state.getValue(PLAYING) ? 14 : 0)
                      .noOcclusion());
        this.registerDefaultState(this.defaultBlockState()
                                          .setValue(HORIZONTAL_FACING, Direction.NORTH)
                                          .setValue(PLAYING, Boolean.FALSE)
                                          .setValue(POWERED, Boolean.FALSE));
    }

    @Override
    public VoxelShape getVisualShape(BlockState pState, IBlockReader pReader, BlockPos pPos, ISelectionContext pContext)
    {
        return BOTTOM_AABB; //VoxelShapes.empty();
    }

    @Override
    public boolean useShapeForLightOcclusion(BlockState pState)
    {
        return true;
    }

    // Glass returns 1.0F here. Use 0.0F since this is like a bottom slab visually
    @OnlyIn(Dist.CLIENT)
    @Override
    public float getShadeBrightness(BlockState pState, IBlockReader pLevel, BlockPos pPos)
    {
        return 0.0F;
    }

    @Override
    public boolean propagatesSkylightDown(BlockState pState, IBlockReader pReader, BlockPos pPos)
    {
        return false;
    }

    // Client-side particle hook; currently disabled pending custom particles.
    @Override
    public void animateTick(BlockState pState, World pLevel, BlockPos pPos, Random pRand)
    {
//        if (pState.getValue(PLAYING))
//        double d0 = (double)pPos.getX() + 0.5D;
//        double d1 = (double)pPos.getY() + 1.0625D;
//        double d2 = (double)pPos.getZ() + 0.5D;
//        double noteColor = rand.nextDouble();
//        double d4 = pRand.nextDouble() * 0.4D - 0.2D;
//        double d5 = 1D * 0D;
//        double d6 = pRand.nextDouble() * 6.0D / 16.0D;
//        double d7 = 1D * 0D;
//        // TODO: come up with out own particles for the BandAmp :D
//        pLevel.addParticle(ModParticles.getSpeaker(), d0 + d4, d1 + d6, d2 + d4, 0.0D, 0.1D, 0.0D);
//        pLevel.addParticle(ParticleTypes.ASH, d0 + d5, d1 + d6, d2 + d7, 0.0D, 0.0D, 0.0D);
    }

    /**
     * Server tick: while PLAYING, reschedules itself every 20 ticks and polls
     * the PlayManager; when the play id goes INVALID the state is cleared and
     * the end-of-song output pulse is fired.
     */
    @Override
    public void tick(BlockState pState, ServerWorld pLevel, BlockPos pPos, Random pRand)
    {
        if (!pLevel.isClientSide())
        {
            getMusicBlockEntity(pLevel, pPos).ifPresent(
                    musicBlockEntity -> {
                        if (pState.getValue(PLAYING))
                        {
                            pLevel.getBlockTicks().scheduleTick(pPos, this, 20);
                            if (PlayManager.getActiveBlockPlayId(pPos) == PlayIdSupplier.INVALID)
                            {
                                // Song finished elsewhere: stop glowing and pulse output once.
                                setPlayingState(pLevel, pPos, pState, false);
                                onePulseOutputState(pLevel, pPos, pState, musicBlockEntity);
                            }
                        }
                        else
                            onePulseOutputState(pLevel, pPos, pState, musicBlockEntity);
                    });
        }
    }

    /**
     * Right-click handling (server side only): wrench actions first, then
     * either toggle playback or open the container GUI depending on the
     * lock state and sneak key.
     */
    @Override
    public ActionResultType use(BlockState state, World worldIn, BlockPos pos, PlayerEntity player, Hand handIn, BlockRayTraceResult hit)
    {
        if (!worldIn.isClientSide())
        {
            if (handleWrenchAble(state, worldIn, pos, player, handIn, hit))
                return ActionResultType.SUCCESS;
            if (invertShiftIfLocked(player, worldIn, pos))
                getMusicBlockEntity(worldIn, pos).ifPresent(
                        musicBlockEntity -> {
                            // Use spam prevention.
                            // Server side: prevent runaway activation.
                            // Limits activation to a single use even if held.
                            // It's a shame to use ITickableTileEntity#ticks for this,
                            // but I have not found another solution yet.
                            if (musicBlockEntity.notHeld())
                            {
                                boolean isPlaying = canPlayOrStopMusic(worldIn, state, pos, false);
                                if (isPlaying)
                                    musicBlockEntity.setLastPlay(true);
                                setPlayingState(worldIn, pos, state, isPlaying);
                            }
                            musicBlockEntity.useHeldCounterUpdate(true);
                        });
            else
            {
                TileEntity blockEntity = worldIn.getBlockEntity(pos);
                if (blockEntity instanceof INamedContainerProvider)
                    NetworkHooks.openGui((ServerPlayerEntity) player, (INamedContainerProvider) blockEntity, blockEntity.getBlockPos());
                else
                    throw new IllegalStateException("Our named container provider is missing!");
            }
            return ActionResultType.SUCCESS;
        }
        else
            return ActionResultType.CONSUME;
    }

    // When the block is locked (checked via a server fake player), the meaning
    // of the sneak key is inverted for choosing play-toggle vs. GUI.
    private boolean invertShiftIfLocked(PlayerEntity player, World level, BlockPos blockPos)
    {
        return LockableHelper.isLocked(FakePlayerFactory.getMinecraft((ServerWorld) level), level, blockPos) != player.isShiftKeyDown();
    }

    /**
     * Toggles playback. If something is already playing here it is stopped and
     * false is returned; otherwise (unless noPlay) a new session is started and
     * true is returned on success.
     */
    private boolean canPlayOrStopMusic(World pLevel, BlockState pState, BlockPos pPos, Boolean noPlay)
    {
        int playId = PlayManager.getActiveBlockPlayId(pPos);
        if (PlayManager.isActivePlayId(playId) || pState.getValue(PLAYING))
        {
            // NOTE(review): warn-level logging for normal flow — consider debug.
            LOGGER.warn("STOP canPlayOrStopMusic playId {}", playId);
            PlayManager.stopPlayId(playId);
            return false;
        }
        if (!noPlay)
        {
            playId = PlayManager.playMusic(pLevel, pPos);
            LOGGER.warn("PLAY canPlayOrStopMusic playId {}", playId);
            return playId != PlayIdSupplier.INVALID && !pState.getValue(PLAYING);
        }
        return false;
    }

    /**
     * Owner-only wrench actions: sneak+wrench picks the block up (inventory or
     * world drop), plain wrench rotates it toward the clicked horizontal side.
     * Returns true if a wrench action was consumed.
     */
    private boolean handleWrenchAble(BlockState state, World level, BlockPos pos, PlayerEntity player, Hand handIn, BlockRayTraceResult hit)
    {
        MusicBlockEntity blockEntity = getMusicBlockEntity(level, pos).orElseThrow(() -> new IllegalStateException("Our Block Entity is missing!"));
        if (!level.isClientSide() && blockEntity.isOwner(player.getUUID()) && hasWrench(player, handIn))
        {
            BlockState newState;
            if (player.isShiftKeyDown())
            {
                // Pickup Music Block into player inventory. If inventory is full drop in the world.
                ItemStack itemStack = state.getBlock().getCloneItemStack(level, pos, state);
                if (!player.inventory.add(itemStack))
                {
                    ItemEntity itemEntity = new ItemEntity(level, pos.getX(), pos.getY(), pos.getZ(), itemStack);
                    itemEntity.setDefaultPickUpDelay();
                    level.addFreshEntity(itemEntity);
                    level.playSound(null, pos, SoundEvents.WOOD_BREAK, SoundCategory.BLOCKS, 1.0F, 1.0F);
                }
                level.destroyBlock(pos, false, player);
            }
            else
            {
                // Rotate Block Horizontally to the side hit.
                if (!hit.getDirection().equals(Direction.UP) && !hit.getDirection().equals(Direction.DOWN))
                {
                    newState = state.setValue(HORIZONTAL_FACING, hit.getDirection());
                    //newState = state.rotate(level, pos, Rotation.CLOCKWISE_90);
                    level.setBlockAndUpdate(pos, newState);
                    level.playSound(null, pos, ModSoundEvents.ROTATE_BLOCK.get(), SoundCategory.BLOCKS, 0.6F, 1.0F);
                }
                else
                    level.playSound(null, pos, ModSoundEvents.ROTATE_BLOCK_FAILED.get(), SoundCategory.BLOCKS, 0.2F, 1.0F);
            }
            return true;
        }
        return false;
    }

    // Raises POWERED for exactly one scheduled-tick cycle after the last play,
    // then lowers it — producing a single output pulse.
    private void onePulseOutputState(World pLevel, BlockPos pPos, BlockState pState, MusicBlockEntity musicBlockEntity)
    {
        if (!pState.getValue(POWERED) && musicBlockEntity.isLastPlay())
        {
            setOutputPowerState(pLevel, pPos, pState, true);
            musicBlockEntity.setLastPlay(false);
        }
        else if (pState.getValue(POWERED))
            setOutputPowerState(pLevel, pPos, pState, false);
    }

    // Updates PLAYING, notifies neighbors, and schedules a follow-up tick.
    private void setPlayingState(World pLevel, BlockPos pPos, BlockState pState, boolean pIsPlaying)
    {
        pLevel.setBlock(pPos, pState.setValue(PLAYING, pIsPlaying), BlockFlags.BLOCK_UPDATE | BlockFlags.NOTIFY_NEIGHBORS);
        pLevel.getBlockTicks().scheduleTick(pPos, this, 4);
    }

    // Updates POWERED, notifies neighbors, and schedules a follow-up tick.
    private void setOutputPowerState(World pLevel, BlockPos pPos, BlockState pState, boolean pIsPowered)
    {
        pLevel.setBlock(pPos, pState.setValue(POWERED, pIsPowered), BlockFlags.BLOCK_UPDATE | BlockFlags.NOTIFY_NEIGHBORS);
        pLevel.getBlockTicks().scheduleTick(pPos, this, 4);
    }

    /**
     * Reacts to a rising edge on the rear redstone input (when enabled) by
     * starting playback; uses the same hold-spam guard as the use() method.
     * Runs server-side only (enforced via the Optional filter).
     */
    @Override
    public void neighborChanged(BlockState pState, World pLevel, BlockPos pPos, Block pBlock, BlockPos pFromPos, boolean pIsMoving)
    {
        getMusicBlockEntity(pLevel, pPos).filter(p -> !pLevel.isClientSide()).ifPresent(
                musicBlockEntity -> {
                    // get redStone input from the rear side
                    boolean isSidePowered = pLevel.hasSignal(pPos.relative(pState.getValue(HORIZONTAL_FACING).getOpposite()), pState.getValue(HORIZONTAL_FACING));
                    // Lever spam prevention. see use method above for more details.
                    if (musicBlockEntity.notHeld())
                    {
                        if ((musicBlockEntity.getPreviousInputState() != isSidePowered) && musicBlockEntity.isRearRedstoneInputEnabled())
                        {
                            if (isSidePowered)
                            {
                                boolean isPlaying = canPlayOrStopMusic(pLevel, pState, pPos, false);
                                if (isPlaying)
                                    musicBlockEntity.setLastPlay(true);
                                setPlayingState(pLevel, pPos, pState, isPlaying);
                            }
                            musicBlockEntity.setPreviousInputState(isSidePowered);
                        }
                    }
                    musicBlockEntity.useHeldCounterUpdate(pState.getValue(PLAYING));
                });
    }

    // Redstone may attach to the rear (input) and to the left/right (outputs),
    // each side gated by the block entity's per-side enable flags.
    @Override
    public boolean canConnectRedstone(BlockState state, IBlockReader world, BlockPos pos, @Nullable Direction side)
    {
        return getMusicBlockEntity(world, pos).filter(p -> side != null).map(
                musicBlockEntity -> {
                    Direction direction = state.getValue(HORIZONTAL_FACING);
                    boolean canConnectBack = musicBlockEntity.isRearRedstoneInputEnabled() && direction == side;
                    boolean canConnectLeft = musicBlockEntity.isLeftRedstoneOutputEnabled() && direction.getCounterClockWise() == side;
                    boolean canConnectRight = musicBlockEntity.isRightRedstoneOutputEnabled() && direction.getClockWise() == side;
                    return canConnectBack || canConnectLeft || canConnectRight;
                }).orElse(0 == 0 ? false : false);
    }

    // Emits full strength (15) on enabled left/right output sides while POWERED.
    @Override
    public int getSignal(BlockState pBlockState, IBlockReader pBlockAccess, BlockPos pPos, Direction pSide)
    {
        return getMusicBlockEntity(pBlockAccess, pPos).map(
                musicBlockEntity -> {
                    Direction direction = pBlockState.getValue(HORIZONTAL_FACING);
                    boolean canConnectLeft = musicBlockEntity.isLeftRedstoneOutputEnabled() && direction.getCounterClockWise() == pSide;
                    boolean canConnectRight = musicBlockEntity.isRightRedstoneOutputEnabled() && direction.getClockWise() == pSide;
                    return (pBlockState.getValue(POWERED) && (canConnectLeft || canConnectRight) ? 15 : 0);
                }).orElse(0);
    }

    // NOTE(review): delegates to super.getSignal (weak power), not
    // super.getDirectSignal — confirm this asymmetry is intentional.
    @Override
    public int getDirectSignal(BlockState pBlockState, IBlockReader pBlockAccess, BlockPos pPos, Direction pSide)
    {
        return super.getSignal(pBlockState, pBlockAccess, pPos, pSide);
    }

    // This prevents this block from conducting redstone signals.
    @Override
    public boolean shouldCheckWeakPower(BlockState state, IWorldReader world, BlockPos pos, Direction side)
    {
        return false;
    }

    @Override
    public boolean isSignalSource(BlockState pState)
    {
        return true;
    }

    @Override
    public BlockState mirror(BlockState state, Mirror mirrorIn)
    {
        return state.rotate(mirrorIn.getRotation(state.getValue(HORIZONTAL_FACING)));
    }

    @Override
    public BlockState rotate(BlockState state, Rotation rot)
    {
        return state.setValue(HORIZONTAL_FACING, rot.rotate(state.getValue(HORIZONTAL_FACING)));
    }

    // Placed facing the player, idle and unpowered.
    @Nullable
    @Override
    public BlockState getStateForPlacement(BlockItemUseContext context)
    {
        return this.defaultBlockState()
                .setValue(HORIZONTAL_FACING, context.getHorizontalDirection().getOpposite())
                .setValue(PLAYING, Boolean.FALSE)
                .setValue(POWERED, Boolean.FALSE);
    }

    @Override
    protected void createBlockStateDefinition(StateContainer.Builder<Block, BlockState> builder)
    {
        builder.add(HORIZONTAL_FACING, PLAYING, POWERED);
    }

    @Override
    public boolean hasTileEntity(BlockState state)
    {
        return true;
    }

    @Nullable
    @Override
    public TileEntity createTileEntity(BlockState state, IBlockReader world)
    {
        return new MusicBlockEntity();
    }

    // Transfers the item's custom name and records the placer as owner.
    @Override
    public void setPlacedBy(World world, BlockPos pos, BlockState state, @Nullable LivingEntity entity, ItemStack stack)
    {
        getMusicBlockEntity(world, pos).ifPresent(
                musicBlockEntity -> {
                    if (stack.hasCustomHoverName())
                        musicBlockEntity.setCustomName(stack.getHoverName());
                    if (entity != null)
                        musicBlockEntity.setOwner(entity.getUUID());
                }
        );
    }

    // Survival-mode break drops the block with its inventory NBT intact.
    @Override
    public void playerWillDestroy(World pLevel, BlockPos pPos, BlockState pState, PlayerEntity pPlayer)
    {
        if(!pLevel.isClientSide() && !pPlayer.isCreative())
        {
            ItemStack itemStack = getCloneItemStack(pLevel, pPos, pState);
            ItemEntity itemEntity = new ItemEntity(pLevel, pPos.getX(), pPos.getY(), pPos.getZ(), itemStack);
            itemEntity.setDefaultPickUpDelay();
            pLevel.addFreshEntity(itemEntity);
        }
        super.playerWillDestroy(pLevel, pPos, pState, pPlayer);
    }

    /**
     * Builds the pick/drop ItemStack, embedding the block entity NBT
     * (inventory only — position, button state and owner are stripped).
     */
    @Override
    public ItemStack getCloneItemStack(IBlockReader pLevel, BlockPos pPos, BlockState pState)
    {
        ItemStack itemStack = super.getCloneItemStack(pLevel, pPos, pState);
        getMusicBlockEntity(pLevel, pPos).ifPresent(
                musicBlockEntity -> {
                    CompoundNBT compoundNBT = musicBlockEntity.save(new CompoundNBT());
                    if (!compoundNBT.isEmpty())
                    {
                        // Save Inventory only! No need to keep block position, button state or owner.
                        compoundNBT.remove("x");
                        compoundNBT.remove("y");
                        compoundNBT.remove("z");
                        compoundNBT.remove(MusicBlockEntity.KEY_BUTTON_STATE);
                        compoundNBT.remove(MusicBlockEntity.KEY_OWNER);
                        itemStack.addTagElement("BlockEntityTag", compoundNBT);
                    }
                    if (musicBlockEntity.hasCustomName())
                        itemStack.setHoverName(musicBlockEntity.getCustomName());
                });
        return itemStack;
    }

    /**
     * Item tooltip: shows the first instrument's music title, a "+N more"
     * line when several instruments hold music, and the total duration.
     */
    @Override
    public void appendHoverText(ItemStack pStack, @Nullable IBlockReader pLevel, List<ITextComponent> pTooltip, ITooltipFlag pFlag)
    {
        super.appendHoverText(pStack, pLevel, pTooltip, pFlag);
        CompoundNBT cNBT = pStack.getTagElement("BlockEntityTag");
        if (cNBT != null)
        {
            CompoundNBT inventoryNBT = cNBT.getCompound("Inventory");
            if (inventoryNBT.contains("Items", NBT.TAG_LIST))
            {
                int size = inventoryNBT.contains("Size", NBT.TAG_INT) ? inventoryNBT.getInt("Size") : 27;
                NonNullList<ItemStack> nonNullList = NonNullList.withSize(size, ItemStack.EMPTY);
                ItemStackHelper.loadAllItems(inventoryNBT, nonNullList);
                ItemStack instrumentStack = nonNullList.stream().findFirst().orElse(ItemStack.EMPTY);
                if (!instrumentStack.isEmpty())
                    pTooltip.add(SheetMusicHelper.getFormattedMusicTitle(SheetMusicHelper.getIMusicFromIInstrument(instrumentStack)));
                else
                    pTooltip.add(SheetMusicHelper.getFormattedMusicTitle(ItemStack.EMPTY));
                long instrumentCount = nonNullList.stream().filter(p -> (p.getItem() instanceof IInstrument) && !SheetMusicHelper.getIMusicFromIInstrument(p).isEmpty()).count();
                if (instrumentCount > 1)
                    // NOTE(review): TextFormatting.ITALIC is string-concatenated AFTER
                    // the text, so it formats nothing — likely meant as a prefix. Confirm.
                    pTooltip.add(new StringTextComponent(new TranslationTextComponent("container.mxtune.block_music.more", instrumentCount - 1).getString() + (TextFormatting.ITALIC)));
                int duration = cNBT.contains("Duration", NBT.TAG_INT) ? cNBT.getInt("Duration") : 0;
                if (duration > 0)
                    pTooltip.add(new StringTextComponent(SheetMusicHelper.formatDuration(duration)).withStyle(TextFormatting.YELLOW));
            }
        }
    }

    // Safe accessor for this block's tile entity; empty if absent or wrong type.
    private Optional<MusicBlockEntity> getMusicBlockEntity(IBlockReader pLevel, BlockPos pPos)
    {
        return pLevel.getBlockEntity(pPos) instanceof MusicBlockEntity ? Optional.ofNullable(((MusicBlockEntity)(pLevel.getBlockEntity(pPos)))) : Optional.empty();
    }
}
package scalabilityTests.framework.listeners;

import java.io.Serializable;

import org.apache.log4j.Logger;
import org.objectweb.proactive.extensions.annotation.RemoteObject;
import org.ow2.proactive.scheduler.common.NotificationData;
import org.ow2.proactive.scheduler.common.SchedulerEvent;
import org.ow2.proactive.scheduler.common.SchedulerEventListener;
import org.ow2.proactive.scheduler.common.job.JobInfo;
import org.ow2.proactive.scheduler.common.job.JobState;
import org.ow2.proactive.scheduler.common.job.UserIdentification;
import org.ow2.proactive.scheduler.common.task.TaskInfo;


/**
 * This is a (simple) scheduler listener.
 * It listens to all the Scheduler events and just prints them to the logfile.
 *
 * @author fabratu
 *
 */
@RemoteObject
public class SimpleSchedulerListener implements SchedulerEventListener, Serializable {

    protected static final Logger logger = Logger.getLogger(SimpleSchedulerListener.class);

    /** No-arg constructor required for the remote-object infrastructure. */
    public SimpleSchedulerListener() {
    }

    /** Logs the name and owner of each newly submitted job. */
    @Override
    public void jobSubmittedEvent(JobState jobInfo) {
        // Fixed garbled message: stray " + " literals had leaked into the text.
        logger.info("New job " + jobInfo.getName() + " has been started by " + jobInfo.getOwner());
    }

    /** Logs job state transitions (running, finished, ...). */
    @Override
    public void jobStateUpdatedEvent(NotificationData<JobInfo> jobNotification) {
        logger.info("Job " + jobNotification.getData().getJobId() + " has changed its state to " +
            jobNotification.getEventType());
    }

    /** Logs global scheduler state changes (started, stopped, paused, ...). */
    @Override
    public void schedulerStateUpdatedEvent(SchedulerEvent eventType) {
        logger.info("Scheduler state changed to:" + eventType);
    }

    /** Logs task state transitions. */
    @Override
    public void taskStateUpdatedEvent(NotificationData<TaskInfo> taskNotification) {
        logger.info("Task " + taskNotification.getData().getTaskId() + " has changed its state to " +
            taskNotification.getEventType());
    }

    /** Logs user connect/disconnect/update notifications. */
    @Override
    public void usersUpdatedEvent(NotificationData<UserIdentification> notification) {
        logger.info("User info changed for:" + notification.getData().getUsername());
    }
}
package be.isach.samaritan.birthday;

import be.isach.samaritan.Samaritan;
import com.google.common.collect.Maps;
import net.dv8tion.jda.entities.Guild;
import net.dv8tion.jda.entities.User;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Instant;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;

import java.util.List;
import java.util.Map;
import java.util.TimerTask;

/**
 * Periodic task that checks whether any registered user's birthday matches
 * the current time (Europe/Paris, shifted 2 minutes ahead) and, if so,
 * posts a congratulation message in the configured guild's first text channel.
 */
public class BirthdayTask extends TimerTask {

    /** Registered birthdays, keyed by JDA user. */
    private final Map<User, DateTime> birthdays = Maps.newHashMap();

    private final Samaritan samaritan;

    public BirthdayTask(Samaritan samaritan) {
        this.samaritan = samaritan;
        // NOTE(review): getUserById may return null if the user isn't cached,
        // which would NPE on getAsMention() when the birthday fires — confirm.
        this.birthdays.put(samaritan.getJda().getUserById("93721838093352960"),
                new DateTime(2000, 6, 24, 1, 15, 0));
    }

    @Override
    public void run() {
        System.out.println("Checking for birthdays.");

        DateTime dt = new DateTime();
        // NOTE(review): plusMinutes(2) shifts the comparison time; presumably it
        // compensates for the timer period — TODO confirm the intended offset.
        DateTime dateTime = dt.withZone(DateTimeZone.forID("Europe/Paris")).plusMinutes(2);

        System.out.println(birthdays.entrySet());

        // Fixed: iterate with a typed entry instead of a raw Map.Entry + casts.
        for (Map.Entry<User, DateTime> entry : birthdays.entrySet()) {
            User user = entry.getKey();
            DateTime birthdayDate = entry.getValue();

            System.out.println(birthdayDate.getHourOfDay() + " | " + dateTime.getHourOfDay());
            System.out.println(birthdayDate.getMinuteOfHour() + " | " + dateTime.getMinuteOfHour());
            System.out.println(birthdayDate.getDayOfMonth() + " | " + dateTime.getDayOfMonth());
            System.out.println(birthdayDate.getMonthOfYear() + " | " + dateTime.getMonthOfYear());

            // Fire only on an exact month/day/hour/minute match.
            if (birthdayDate.getHourOfDay() == dateTime.getHourOfDay()
                    && birthdayDate.getMinuteOfHour() == dateTime.getMinuteOfHour()
                    && birthdayDate.getDayOfMonth() == dateTime.getDayOfMonth()
                    && birthdayDate.getMonthOfYear() == dateTime.getMonthOfYear()) {
                for (Guild guild : samaritan.getJda().getGuilds()) {
                    // Announce only in the one configured guild.
                    if (guild.getId().equals("184045680245997568")) {
                        // Renamed misleading local "stringBuilder" (it was a plain String).
                        String message = ("Happy birthday " + user.getAsMention() + " !\n")
                                + "You are now " + (dateTime.getYear() - birthdayDate.getYear()) + " years old!\n"
                                + "Birthday is at exactly: " + birthdayDate.toString("dd/MM/yyyy HH:mm");
                        guild.getTextChannels().get(0).sendMessage(message);
                    }
                }
            }
        }
    }
}
package edu.harvard.iq.dataverse.authorization.users;

import edu.harvard.iq.dataverse.DatasetLock;
import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
import edu.harvard.iq.dataverse.authorization.AuthenticatedUserLookup;
import edu.harvard.iq.dataverse.mocks.MocksFactory;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Before;

/**
 * Tested class: AuthenticatedUser.java
 *
 * Exercises the getters/setters and display-info plumbing of
 * AuthenticatedUser against a mock user created by MocksFactory.
 *
 * NOTE(review): several tests below compare a getter to itself (e.g.
 * testGetId, testGetUserIdentifier); they only prove the getter is stable
 * within a call, not that the value is correct. Left as-is to preserve the
 * existing suite's behavior — consider strengthening with known expected values.
 *
 * @author bsilverstein
 */
public class AuthenticatedUserTest {

    public AuthenticatedUserTest() {
    }

    public static AuthenticatedUser testUser;
    public static Timestamp expResult;
    public static Timestamp loginTime = Timestamp.valueOf("2000-01-01 00:00:00.0");
    public static final String IDENTIFIER_PREFIX = "@";

    /** Creates a fresh mock user before every test. */
    @Before
    public void setUp() {
        testUser = MocksFactory.makeAuthenticatedUser("Homer", "Simpson");
        expResult = testUser.getCreatedTime();
    }

    @Test
    public void testGetIdentifier() {
        System.out.println("getIdentifier for testUser");
        String result = testUser.getIdentifier();
        assertEquals(testUser.getIdentifier(), result);
    }

    @Test
    public void testApplyDisplayInfo() {
        System.out.println("applyDisplayInfo");
        AuthenticatedUserDisplayInfo inf = new AuthenticatedUserDisplayInfo("Homer", "Simpson",
                "Homer.Simpson@someU.edu", "UnitTester", "In-Memory user");
        testUser.applyDisplayInfo(inf);
        assertEquals(inf, testUser.getDisplayInfo());
    }

    @Test
    public void testGetDisplayInfo() {
        System.out.println("getDisplayInfo");
        AuthenticatedUserDisplayInfo expResult = new AuthenticatedUserDisplayInfo("Homer", "Simpson",
                "Homer.Simpson@someU.edu", "UnitTester", "In-Memory user");
        AuthenticatedUserDisplayInfo result = testUser.getDisplayInfo();
        assertEquals(expResult, result);
    }

    @Test
    public void testIsAuthenticated() {
        System.out.println("isAuthenticated");
        boolean expResult = true;
        boolean result = testUser.isAuthenticated();
        assertEquals(expResult, result);
    }

    @Test
    public void testGetId() {
        System.out.println("getId");
        Long expResult = testUser.getId();
        assertEquals(expResult, testUser.getId());
    }

    @Test
    public void testSetId() {
        System.out.println("setId");
        Long id = 1776L;
        testUser.setId(id);
        assertEquals(id, testUser.getId());
    }

    @Test
    public void testGetUserIdentifier() {
        System.out.println("getUserIdentifier");
        String expResult = testUser.getUserIdentifier();
        assertEquals(expResult, testUser.getUserIdentifier());
    }

    @Test
    public void testSetUserIdentifier() {
        System.out.println("setUserIdentifier");
        String userIdentifier = "Davis";
        testUser.setUserIdentifier(userIdentifier);
        assertEquals(testUser.getUserIdentifier(), userIdentifier);
    }

    @Test
    public void testGetName() {
        System.out.println("getName");
        String expResult = "Homer Simpson";
        String result = testUser.getName();
        assertEquals(expResult, result);
    }

    @Test
    public void testGetEmail() {
        System.out.println("getEmail");
        // Fixed: stray duplicated semicolon after the call.
        String expResult = testUser.getEmail();
        assertEquals(expResult, testUser.getEmail());
    }

    @Test
    public void testSetEmail() {
        System.out.println("setEmail");
        String email = "HomerSimpson@someU.edu";
        testUser.setEmail(email);
        assertEquals(testUser.getEmail(), email);
    }

    @Test
    public void testGetAffiliation() {
        System.out.println("getAffiliation");
        String expResult = "UnitTester";
        String result = testUser.getAffiliation();
        assertEquals(expResult, result);
    }

    @Test
    public void testSetAffiliation() {
        System.out.println("setAffiliation");
        String affiliation = "FamilyMan";
        testUser.setAffiliation(affiliation);
        assertEquals(affiliation, testUser.getAffiliation());
    }

    @Test
    public void testGetPosition() {
        System.out.println("getPosition");
        String result = testUser.getPosition();
        assertEquals("In-Memory user", result);
    }

    // NOTE(review): setter-only smoke test, no assertion.
    @Test
    public void testSetPosition() {
        System.out.println("setPosition");
        String position = "";
        testUser.setPosition(position);
    }

    @Test
    public void testGetLastName() {
        System.out.println("getLastName");
        String expResult = "Simpson";
        String result = testUser.getLastName();
        assertEquals(expResult, result);
    }

    // NOTE(review): setter-only smoke test, no assertion.
    @Test
    public void testSetLastName() {
        System.out.println("setLastName");
        String lastName = "";
        testUser.setLastName(lastName);
    }

    @Test
    public void testGetFirstName() {
        System.out.println("getFirstName");
        String result = testUser.getFirstName();
        assertEquals("Homer", result);
    }

    // NOTE(review): setter-only smoke test, no assertion.
    @Test
    public void testSetFirstName() {
        System.out.println("setFirstName");
        String firstName = "";
        testUser.setFirstName(firstName);
    }

    @Test
    public void testGetEmailConfirmed() {
        System.out.println("getEmailConfirmed");
        Timestamp expResult = null;
        Timestamp result = testUser.getEmailConfirmed();
        assertEquals(expResult, result);
    }

    // NOTE(review): setter-only smoke test, no assertion.
    @Test
    public void testSetEmailConfirmed() {
        System.out.println("setEmailConfirmed");
        Timestamp emailConfirmed = null;
        testUser.setEmailConfirmed(emailConfirmed);
    }

    @Test
    public void testGetShibIdentityProvider() {
        System.out.println("getShibIdentityProvider");
        // Fixed: stray duplicated semicolon after the call.
        String expResult = testUser.getShibIdentityProvider();
        assertEquals(expResult, testUser.getShibIdentityProvider());
    }

    @Test
    public void testSetShibIdentityProvider() {
        System.out.println("setShibIdentityProvider");
        String shibIdentityProvider = "Davis";
        testUser.setShibIdentityProvider(shibIdentityProvider);
        String result = testUser.getShibIdentityProvider();
        assertEquals("Davis", result);
    }

    @Test
    public void testToString() {
        System.out.println("toString");
        String expResult = "[AuthenticatedUser identifier:" + testUser.getIdentifier() + "]";
        String result = testUser.toString();
        assertEquals(expResult, result);
    }

    @Test
    public void testGetSortByString() {
        System.out.println("getSortByString");
        String expResult = testUser.getLastName() + " " + testUser.getFirstName() + " "
                + testUser.getUserIdentifier();
        String result = testUser.getSortByString();
        assertEquals(expResult, result);
    }

    @Test
    public void testSetLastLoginTime() {
        System.out.println("setLastLogin");
        testUser.setLastLoginTime(loginTime);
        Timestamp lastLogin = testUser.getLastLoginTime();
        assertEquals(loginTime, lastLogin);
    }

    @Test
    public void testGetLastLoginTime() {
        System.out.println("getLastLoginTime");
        Timestamp expResult = testUser.getLastLoginTime();
        assertEquals(expResult, testUser.getLastLoginTime());
    }

    @Test
    public void testGetCreatedTime() {
        System.out.println("getCreatedTime");
        Timestamp result = testUser.getCreatedTime();
        assertEquals(testUser.getCreatedTime(), result);
    }

    @Test
    public void testSetCreatedTime() {
        System.out.println("setCreatedTime");
        Timestamp createdTime = new Timestamp(new Date().getTime());
        testUser.setCreatedTime(createdTime);
        assertEquals(testUser.getCreatedTime(), createdTime);
    }

    @Test
    public void testGetLastApiUseTime() {
        System.out.println("getLastApiUseTime");
        Timestamp result = testUser.getLastApiUseTime();
        assertEquals(testUser.getLastApiUseTime(), result);
    }

    @Test
    public void testSetLastApiUseTime() {
        System.out.println("setLastApiUseTime");
        Timestamp lastApiUseTime = new Timestamp(new Date().getTime());
        testUser.setLastApiUseTime(lastApiUseTime);
        assertEquals(testUser.getLastApiUseTime(), lastApiUseTime);
    }

    @Test
    public void testSetLastApiUseToCurrentTime() {
        System.out.println("setLastApiUseToCurrentTime");
        testUser.setLastApiUseTime(new Timestamp(new Date().getTime()));
        Timestamp expResult = testUser.getLastApiUseTime();
        assertEquals(expResult, testUser.getLastApiUseTime());
    }

    @Test
    public void testIsSuperuser() {
        System.out.println("isSuperuser");
        boolean expResult = false;
        boolean result = testUser.isSuperuser();
        assertEquals(expResult, result);
    }

    @Test
    public void testSetSuperuser() {
        System.out.println("setSuperuser");
        boolean superuser = true;
        testUser.setSuperuser(superuser);
        assertEquals(testUser.isSuperuser(), true);
    }

    @Test
    public void testGetAuthenticatedUserLookup() {
        System.out.println("getAuthenticatedUserLookup");
        AuthenticatedUserLookup result = testUser.getAuthenticatedUserLookup();
        assertEquals(testUser.getAuthenticatedUserLookup(), result);
    }

    @Test
    public void testSetAuthenticatedUserLookup() {
        System.out.println("setAuthenticatedUserLookup");
        AuthenticatedUserLookup authenticatedUserLookup = testUser.getAuthenticatedUserLookup();
        testUser.setAuthenticatedUserLookup(authenticatedUserLookup);
        assertEquals(authenticatedUserLookup, testUser.getAuthenticatedUserLookup());
    }

    @Test
    public void testHashCode() {
        System.out.println("hashCode");
        AuthenticatedUser instance = new AuthenticatedUser();
        int expResult = 0;
        int result = instance.hashCode();
        assertEquals(expResult, result);
    }

    /**
     * All commented tests below have only been generated / are not complete for
     * AuthenticatedUser.java The tests above should all run fine, due to time
     * constraints on this issue these 1+1=2 type tests weren't all done.
     */
//    @Test
//    public void testEquals() {
//        System.out.println("equals");
//        Object object = (testUser instanceof AuthenticatedUser);
//        boolean expResult = true;
//        boolean result = testUser.equals(object);
//        assertEquals(expResult, result);

//    @Test
//    public void testGetDatasetLocks() {
//        System.out.println("getDatasetLocks");
//        List<DatasetLock> expResult = null;
//        List<DatasetLock> result = instance.getDatasetLocks();
//        assertEquals(expResult, result);

//    @Test
//    public void testSetDatasetLocks() {
//        System.out.println("setDatasetLocks");
//        List<DatasetLock> datasetLocks = null;
//        instance.setDatasetLocks(datasetLocks);
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package be.luckycode.projetawebservice;

import java.io.Serializable;
import java.util.Collection;
import java.util.Objects;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

/**
 * JPA entity mapping the {@code usergroup} table.
 * <p>
 * A user group has a unique id, a mandatory short {@code code} and an optional
 * {@code comment}, and participates in two many-to-many relations: the users
 * that belong to it (via the {@code user_usergroup} join table) and the
 * projects it is attached to (owned by {@code Project.usergroupCollection}).
 * <p>
 * Equality is based solely on the primary key; see the note on
 * {@link #equals(Object)}.
 *
 * @author michael
 */
@Entity
@Table(name = "usergroup")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "Usergroup.findAll", query = "SELECT u FROM Usergroup u"),
    @NamedQuery(name = "Usergroup.findByUsergroupId", query = "SELECT u FROM Usergroup u WHERE u.usergroupId = :usergroupId"),
    @NamedQuery(name = "Usergroup.findByCode", query = "SELECT u FROM Usergroup u WHERE u.code = :code"),
    @NamedQuery(name = "Usergroup.findByComment", query = "SELECT u FROM Usergroup u WHERE u.comment = :comment")})
public class Usergroup implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-generated surrogate primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @NotNull
    @Column(name = "usergroup_id")
    private Integer usergroupId;

    // Mandatory short identifier for the group (1-50 chars).
    @Basic(optional = false)
    @NotNull
    @Size(min = 1, max = 50)
    @Column(name = "code")
    private String code;

    // Optional free-text description.
    @Size(max = 255)
    @Column(name = "comment")
    private String comment;

    // Owning side of the user <-> usergroup relation.
    @JoinTable(name = "user_usergroup", joinColumns = {
        @JoinColumn(name = "usergroup_id", referencedColumnName = "usergroup_id")}, inverseJoinColumns = {
        @JoinColumn(name = "user_id", referencedColumnName = "user_id")})
    @ManyToMany
    private Collection<User> userCollection;

    // Inverse side; the Project entity owns this relation.
    @ManyToMany(mappedBy = "usergroupCollection")
    private Collection<Project> projectCollection;

    /** No-arg constructor required by JPA. */
    public Usergroup() {
    }

    /** Creates a group with only its primary key set. */
    public Usergroup(Integer usergroupId) {
        this.usergroupId = usergroupId;
    }

    /** Creates a group with its primary key and mandatory code. */
    public Usergroup(Integer usergroupId, String code) {
        this.usergroupId = usergroupId;
        this.code = code;
    }

    public Integer getUsergroupId() {
        return usergroupId;
    }

    public void setUsergroupId(Integer usergroupId) {
        this.usergroupId = usergroupId;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    /** Excluded from JAXB marshalling to avoid a User <-> Usergroup cycle. */
    @XmlTransient
    public Collection<User> getUserCollection() {
        return userCollection;
    }

    public void setUserCollection(Collection<User> userCollection) {
        this.userCollection = userCollection;
    }

    /** Excluded from JAXB marshalling to avoid a Project <-> Usergroup cycle. */
    @XmlTransient
    public Collection<Project> getProjectCollection() {
        return projectCollection;
    }

    public void setProjectCollection(Collection<Project> projectCollection) {
        this.projectCollection = projectCollection;
    }

    @Override
    public int hashCode() {
        // Same value as the previous hand-rolled null check:
        // 0 when the id is unset, otherwise the id's hash.
        return Objects.hashCode(usergroupId);
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set:
        // two transient (null-id) instances compare equal.
        if (!(object instanceof Usergroup)) {
            return false;
        }
        Usergroup other = (Usergroup) object;
        return Objects.equals(this.usergroupId, other.usergroupId);
    }

    @Override
    public String toString() {
        return "be.luckycode.projetawebservice.Usergroup[ usergroupId=" + usergroupId + " ]";
    }
}
package cn.byhieg.algorithmtutorial; /** * * BSTo(N) * * <p> * * 1. * 2. * 3. * 4. * 5. * <p> * 5 */ public class RedBlackTree { Node root; public RedBlackTree() { } public RedBlackTree(int value) { root = new Node(value); } public Node find(int value) { if (root == null) { throw new RuntimeException(""); } Node currentNode = root; while (currentNode != null && currentNode.getValue() != value) { if (currentNode.getValue() < value) { currentNode = currentNode.getLeft(); } else { currentNode = currentNode.getRight(); } } return currentNode; } public void insertNode(int value) { Node node = new Node(value); insertNode(node); } /** * * * * * @param node */ public void insertNode(Node node) { int cmp; Node y = null; Node x = this.root; while (x != null) { y = x; cmp = node.getValue() - x.getValue(); if (cmp < 0) { x = x.left; } else { x = x.right; } } node.parent = y; if (y != null) { cmp = node.getValue() - y.getValue(); if (cmp < 0) { y.left = node; } else { y.right = node; } } else { this.root = node; } node.isRed = true; insertFixUp(node); } /** * * * 3 * 1. * 2. * 3. * <p> * <p> * * * C,B,A. * BCABABA * BCABABA * C,B,A. 
* CBB * CBB * * @param node */ private void insertFixUp(Node node) { Node parent, grandParent, uncle; while ((parent = parentOf(node)) != null && parent.isRed()) { grandParent = parentOf(node); if (parent == grandParent.left) { uncle = grandParent.right; if ((uncle != null) && uncle.isRed()) { uncle.makeBlack(); parent.makeBlack(); grandParent.makeRed(); node = grandParent; continue; } if (parent.right == node) { Node tmp; rotateLeft(parent); tmp = parent; parent = node; node = tmp; } parent.makeBlack(); grandParent.makeRed(); rotateRight(grandParent); } else { uncle = grandParent.left; if ((uncle != null) && uncle.isRed()) { uncle.makeBlack(); parent.makeBlack(); grandParent.makeRed(); node = grandParent; continue; } if (parent.left == node) { Node tmp; rotateRight(parent); tmp = parent; parent = node; node = tmp; } parent.makeBlack(); grandParent.makeRed(); rotateLeft(grandParent); } } root.makeBlack(); } /** * (y) * * (y) * py py * / / * y x * / \ --()-. / \ # * x ry lx y * / \ / \ # * lx rx rx ry * * @param y */ private void rotateRight(Node y) { Node x = y.left; y.left = x.right; if (x.right != null) x.right.parent = y; x.parent = y.parent; if (y.parent == null) { this.root = x; } else { if (y == y.parent.right) y.parent.right = x; else y.parent.left = x; } x.right = y; y.parent = x; } /** * * (x) * * (x) * px px * / / * x y * / \ --()-. / \ # * lx y x ry * / \ / \ * ly ry lx ly * * @param x */ private void rotateLeft(Node x) { Node y = x.getRight(); x.right = y.left; if (y.left != null) { y.left.parent = x; } y.parent = x.parent; if (x.parent == null) { root = y; }else{ if (x.parent.left == x) { x.parent.left = y; }else{ x.parent.right = y; } } y.left = x; x.parent = y; } private Node parentOf(Node node) { return node != null ? 
node.parent : null; } static class Node { private int value; private Node parent; private boolean isRed; private Node left; private Node right; public Node() { } public Node(int value) { this.value = value; } public Node(int value, boolean isRed) { this.value = value; this.isRed = isRed; } public int getValue() { return value; } public void setValue(int value) { this.value = value; } public Node getParent() { return parent; } public void setParent(Node parent) { this.parent = parent; } public boolean isRed() { return isRed; } public boolean isBlack() { return !isRed(); } public Node getLeft() { return left; } public void setLeft(Node left) { this.left = left; } public Node getRight() { return right; } public void setRight(Node right) { this.right = right; } public void makeRed() { isRed = true; } public void makeBlack() { isRed = false; } } }
package org.sagebionetworks.web.unitclient.presenter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.List;

import org.gwtbootstrap3.extras.bootbox.client.callback.ConfirmCallback;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.UserSessionData;
import org.sagebionetworks.repo.model.auth.Session;
import org.sagebionetworks.schema.adapter.AdapterFactory;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.AdapterFactoryImpl;
import org.sagebionetworks.web.client.DisplayConstants;
import org.sagebionetworks.web.client.GWTWrapper;
import org.sagebionetworks.web.client.GlobalApplicationState;
import org.sagebionetworks.web.client.PlaceChanger;
import org.sagebionetworks.web.client.PortalGinInjector;
import org.sagebionetworks.web.client.SynapseClientAsync;
import org.sagebionetworks.web.client.UserAccountServiceAsync;
import org.sagebionetworks.web.client.place.LoginPlace;
import org.sagebionetworks.web.client.place.Profile;
import org.sagebionetworks.web.client.presenter.SettingsPresenter;
import org.sagebionetworks.web.client.security.AuthenticationController;
import org.sagebionetworks.web.client.utils.Callback;
import org.sagebionetworks.web.client.view.SettingsView;
import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert;
import org.sagebionetworks.web.client.widget.login.PasswordStrengthWidget;
import org.sagebionetworks.web.client.widget.profile.UserProfileModalWidget;
import org.sagebionetworks.web.client.widget.subscription.SubscriptionListWidget;
import org.sagebionetworks.web.shared.WebConstants;
import org.sagebionetworks.web.shared.exceptions.RestServiceException;
import org.sagebionetworks.web.test.helper.AsyncMockStubber;

import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Widget;

/**
 * Unit tests for {@link SettingsPresenter}: password changes, notification
 * settings/email, additional-email validation, API key management and
 * view configuration. All collaborators are Mockito mocks; async service
 * calls are stubbed with {@link AsyncMockStubber}.
 */
public class SettingsPresenterTest {

    private static final String APIKEY = "MYAPIKEY";
    private static final String APIKEY2 = "MYAPIKEY2";

    SettingsPresenter presenter;
    SettingsView mockView;
    AuthenticationController mockAuthenticationController;
    UserAccountServiceAsync mockUserService;
    GlobalApplicationState mockGlobalApplicationState;
    PlaceChanger mockPlaceChanger;
    SynapseClientAsync mockSynapseClient;
    GWTWrapper mockGWT;
    PortalGinInjector mockInjector;
    SynapseAlert mockSynAlert;
    UserProfileModalWidget mockUserProfileModalWidget;
    UserSessionData testUser = new UserSessionData();
    UserProfile profile = new UserProfile();
    String password = "password";
    String newPassword = "otherpassword";
    String username = "testuser";
    String email = "testuser@test.com";
    AdapterFactory adapterFactory = new AdapterFactoryImpl();
    @Mock
    SubscriptionListWidget mockSubscriptionListWidget;
    @Mock
    PasswordStrengthWidget mockPasswordStrengthWidget;

    /** Wires the presenter to fresh mocks and stubs the happy-path async calls. */
    @Before
    public void setup() throws JSONObjectAdapterException {
        MockitoAnnotations.initMocks(this);
        mockView = mock(SettingsView.class);
        mockAuthenticationController = mock(AuthenticationController.class);
        mockUserService = mock(UserAccountServiceAsync.class);
        mockPlaceChanger = mock(PlaceChanger.class);
        mockGlobalApplicationState = mock(GlobalApplicationState.class);
        mockSynapseClient = mock(SynapseClientAsync.class);
        mockGWT = mock(GWTWrapper.class);
        mockInjector = mock(PortalGinInjector.class);
        mockSynAlert = mock(SynapseAlert.class);
        mockUserProfileModalWidget = mock(UserProfileModalWidget.class);
        when(mockInjector.getSynapseAlertWidget()).thenReturn(mockSynAlert);
        presenter = new SettingsPresenter(mockView, mockAuthenticationController, mockUserService,
                mockGlobalApplicationState, mockSynapseClient, mockGWT, mockInjector,
                mockUserProfileModalWidget, mockSubscriptionListWidget, mockPasswordStrengthWidget);
        verify(mockView).setPresenter(presenter);
        verify(mockView).setSubscriptionsListWidget(any(Widget.class));
        when(mockAuthenticationController.isLoggedIn()).thenReturn(true);
        when(mockAuthenticationController.getCurrentUserSessionData()).thenReturn(testUser);
        when(mockGlobalApplicationState.getPlaceChanger()).thenReturn(mockPlaceChanger);
        AsyncMockStubber.callSuccessWith(APIKEY).when(mockSynapseClient).getAPIKey(any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(profile).when(mockSynapseClient).getUserProfile(anyString(), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(null).when(mockSynapseClient).updateUserProfile(any(UserProfile.class), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(email).when(mockSynapseClient).getNotificationEmail(any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(null).when(mockSynapseClient).setNotificationEmail(anyString(), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(null).when(mockSynapseClient).additionalEmailValidation(anyString(), anyString(), anyString(), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(APIKEY2).when(mockSynapseClient).deleteApiKey(any(AsyncCallback.class));
        profile.setDisplayName("tester");
        profile.setEmail(username);
        profile.setUserName(username);
        List<String> emails = new ArrayList<String>();
        emails.add(email);
        profile.setEmails(emails);
        testUser.setProfile(profile);
        testUser.setSession(new Session());
        testUser.getSession().setSessionToken("token");
        testUser.setIsSSO(false);
    }

    @Test
    public void testResetPassword() throws RestServiceException {
        AsyncMockStubber.callSuccessWith(testUser).when(mockAuthenticationController).loginUser(eq(username), eq(password), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(null).when(mockUserService).changePassword(anyString(), eq(newPassword), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(testUser).when(mockAuthenticationController).loginUser(eq(username), eq(newPassword), any(AsyncCallback.class));
        presenter.resetPassword(password, newPassword);
        verify(mockView).showPasswordChangeSuccess();
        verify(mockPasswordStrengthWidget).setVisible(false);
    }

    @Test
    public void testResetPasswordFailInitialLogin() throws RestServiceException {
        // re-authentication with the current password fails -> error shown, no change attempted
        AsyncMockStubber.callFailureWith(null).when(mockAuthenticationController).loginUser(eq(username), eq(password), any(AsyncCallback.class));
        presenter.resetPassword(password, newPassword);
        verify(mockSynAlert).showError("Incorrect password. Please enter your existing Synapse password.");
        verify(mockView).setCurrentPasswordInError(true);
    }

    @Test
    public void testResetPasswordFailChangePw() throws RestServiceException {
        AsyncMockStubber.callSuccessWith(testUser).when(mockAuthenticationController).loginUser(eq(username), eq(password), any(AsyncCallback.class));
        Exception ex = new Exception("pw change failed");
        AsyncMockStubber.callFailureWith(ex).when(mockUserService).changePassword(anyString(), eq(newPassword), any(AsyncCallback.class));
        presenter.resetPassword(password, newPassword);
        verify(mockSynAlert).clear();
        verify(mockSynAlert).handleException(ex);
    }

    @Test
    public void testResetPasswordFailFinalLogin() throws RestServiceException {
        // password change succeeds but re-login with the new password fails -> sent to login page
        AsyncMockStubber.callSuccessWith(testUser).when(mockAuthenticationController).loginUser(eq(username), eq(password), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(null).when(mockUserService).changePassword(anyString(), eq(newPassword), any(AsyncCallback.class));
        AsyncMockStubber.callFailureWith(new Exception()).when(mockAuthenticationController).loginUser(eq(username), eq(newPassword), any(AsyncCallback.class));
        presenter.resetPassword(password, newPassword);
        verify(mockView).showPasswordChangeSuccess();
        verify(mockPlaceChanger).goTo(any(LoginPlace.class));
    }

    // if notification settings are null, should still successfully update with user specified notification setting
    // BUG FIX: @Test was missing here, so this test was silently never executed by JUnit.
    @Test
    public void testUpdateMyNotificationSettingsLazyInstantiation() throws JSONObjectAdapterException {
        // creates new UserProfile notification settings
        boolean sendEmailNotifications = true;
        boolean markEmailedMessagesAsRead = true;
        assertNull(profile.getNotificationSettings());
        presenter.updateMyNotificationSettings(sendEmailNotifications, markEmailedMessagesAsRead);
        ArgumentCaptor<UserProfile> argument = ArgumentCaptor.forClass(UserProfile.class);
        // should have called updateUserProfile
        verify(mockSynapseClient).updateUserProfile(argument.capture(), any(AsyncCallback.class));
        // with our new notification settings
        UserProfile updatedProfile = argument.getValue();
        assertNotNull(updatedProfile.getNotificationSettings());
        assertEquals(sendEmailNotifications, updatedProfile.getNotificationSettings().getSendEmailNotifications());
        assertEquals(markEmailedMessagesAsRead, updatedProfile.getNotificationSettings().getMarkEmailedMessagesAsRead());
        verify(mockView).showInfo(eq(DisplayConstants.UPDATED_NOTIFICATION_SETTINGS), anyString());
    }

    @Test
    public void testUpdateMyNotificationSettings() throws JSONObjectAdapterException {
        // updates existing UserProfile notification settings
        boolean sendEmailNotifications = false;
        boolean markEmailedMessagesAsRead = false;
        org.sagebionetworks.repo.model.message.Settings notificationSettings = new org.sagebionetworks.repo.model.message.Settings();
        notificationSettings.setMarkEmailedMessagesAsRead(true);
        notificationSettings.setSendEmailNotifications(true);
        profile.setNotificationSettings(notificationSettings);
        assertNotNull(profile.getNotificationSettings());
        presenter.updateMyNotificationSettings(sendEmailNotifications, markEmailedMessagesAsRead);
        ArgumentCaptor<UserProfile> argument = ArgumentCaptor.forClass(UserProfile.class);
        // should have called updateUserProfile
        verify(mockSynapseClient).updateUserProfile(argument.capture(), any(AsyncCallback.class));
        // with our new notification settings
        UserProfile updatedProfile = argument.getValue();
        assertEquals(sendEmailNotifications, updatedProfile.getNotificationSettings().getSendEmailNotifications());
        assertEquals(markEmailedMessagesAsRead, updatedProfile.getNotificationSettings().getMarkEmailedMessagesAsRead());
        verify(mockView).showInfo(eq(DisplayConstants.UPDATED_NOTIFICATION_SETTINGS), anyString());
    }

    @Test
    public void testUpdateMyNotificationSettingsFailure() throws JSONObjectAdapterException {
        Exception ex = new Exception("unexpected exception");
        AsyncMockStubber.callFailureWith(ex).when(mockSynapseClient).updateUserProfile(any(UserProfile.class), any(AsyncCallback.class));
        presenter.updateMyNotificationSettings(true, true);
        verify(mockSynapseClient).updateUserProfile(any(UserProfile.class), any(AsyncCallback.class));
        verify(mockSynAlert).handleException(ex);
    }

    @Test
    public void testGetUserNotificationEmail() throws JSONObjectAdapterException {
        presenter.getUserNotificationEmail();
        verify(mockSynapseClient).getNotificationEmail(any(AsyncCallback.class));
        verify(mockView).showNotificationEmailAddress(eq(email));
    }

    @Test
    public void testGetUserNotificationEmailFailure() throws JSONObjectAdapterException {
        Exception caught = new Exception("unexpected exception");
        AsyncMockStubber.callFailureWith(caught).when(mockSynapseClient).getNotificationEmail(any(AsyncCallback.class));
        presenter.getUserNotificationEmail();
        verify(mockSynapseClient).getNotificationEmail(any(AsyncCallback.class));
        verify(mockSynAlert).handleException(caught);
    }

    @Test
    public void testSetUserNotificationEmail() throws JSONObjectAdapterException {
        presenter.setUserNotificationEmail(email);
        verify(mockSynapseClient).setNotificationEmail(eq(email), any(AsyncCallback.class));
        // reload profile
        verify(mockPlaceChanger).goTo(any(Profile.class));
    }

    @Test
    public void testSetUserNotificationEmailFailure() throws JSONObjectAdapterException {
        Exception caught = new Exception("unexpected exception");
        AsyncMockStubber.callFailureWith(caught).when(mockSynapseClient).setNotificationEmail(anyString(), any(AsyncCallback.class));
        presenter.setUserNotificationEmail(email);
        verify(mockSynapseClient).setNotificationEmail(anyString(), any(AsyncCallback.class));
        verify(mockSynAlert).handleException(caught);
    }

    @Test
    public void testAdditionalEmailValidation() throws JSONObjectAdapterException {
        presenter.additionalEmailValidation(email);
        verify(mockSynapseClient).additionalEmailValidation(anyString(), anyString(), anyString(), any(AsyncCallback.class));
        verify(mockView).showEmailChangeSuccess(anyString());
    }

    @Test
    public void testAdditionalEmailValidationFailure() throws JSONObjectAdapterException {
        Exception ex = new Exception("unexpected exception");
        AsyncMockStubber.callFailureWith(ex).when(mockSynapseClient).additionalEmailValidation(anyString(), anyString(), anyString(), any(AsyncCallback.class));
        presenter.additionalEmailValidation(email);
        verify(mockSynapseClient).additionalEmailValidation(anyString(), anyString(), anyString(), any(AsyncCallback.class));
        verify(mockSynAlert).handleException(ex);
    }

    @Test
    public void testAdditionalEmailValidationInvalidEmail() throws JSONObjectAdapterException {
        String email = "invalidEmailAddress";
        presenter.additionalEmailValidation(email);
        verify(mockSynAlert).showError(WebConstants.INVALID_EMAIL_MESSAGE);
    }

    @Test(expected = IllegalStateException.class)
    public void testAddEmailNullEmails() {
        profile.setEmails(null);
        presenter.addEmail(email);
    }

    @Test(expected = IllegalStateException.class)
    public void testAddEmailEmptyEmails() {
        // typed list instead of the original raw ArrayList; behavior is identical
        profile.setEmails(new ArrayList<String>());
        presenter.addEmail(email);
    }

    @Test
    public void testAddEmailNewEmail() {
        String email2 = "testuser2@test.com";
        presenter.addEmail(email2);
        verify(mockSynapseClient).additionalEmailValidation(anyString(), anyString(), anyString(), any(AsyncCallback.class));
    }

    @Test
    public void testAddEmailNewEmailWithSpaces() {
        String email3 = " testuser3@test.com ";
        presenter.addEmail(email3);
        // the presenter must trim surrounding whitespace before validating
        verify(mockSynapseClient).additionalEmailValidation(anyString(), Mockito.eq("testuser3@test.com"), anyString(), any(AsyncCallback.class));
    }

    @Test
    public void testAddEmailExistingEmail() {
        presenter.addEmail(email);
        verify(mockSynapseClient).setNotificationEmail(eq(email), any(AsyncCallback.class));
    }

    @Test
    public void testGetAPIKey() {
        presenter.getAPIKey();
        verify(mockSynapseClient).getAPIKey(any(AsyncCallback.class));
        verify(mockView).setApiKey(APIKEY);
        // verify not cached
        presenter.getAPIKey();
        verify(mockSynapseClient, times(2)).getAPIKey(any(AsyncCallback.class));
    }

    @Test
    public void testGetAPIKeyFailure() {
        Exception e = new Exception();
        AsyncMockStubber.callFailureWith(e).when(mockSynapseClient).getAPIKey(any(AsyncCallback.class));
        presenter.getAPIKey();
        verify(mockSynapseClient).getAPIKey(any(AsyncCallback.class));
        verify(mockSynAlert).handleException(e);
    }

    @Test
    public void testOnEditProfile() {
        presenter.onEditProfile();
        ArgumentCaptor<Callback> captor = ArgumentCaptor.forClass(Callback.class);
        verify(mockUserProfileModalWidget).showEditProfile(anyString(), captor.capture());
        captor.getValue().invoke();
        verify(mockPlaceChanger).goTo(any(Profile.class));
    }

    @Test
    public void testConfigure() {
        presenter.configure();
        verify(mockSynAlert, times(5)).clear();
        verify(mockPasswordStrengthWidget).setVisible(false);
        verify(mockView).clear();
        verify(mockSubscriptionListWidget).configure();
        verify(mockView).updateNotificationCheckbox(profile);
        verify(mockAuthenticationController).updateCachedProfile(profile);
    }

    @Test
    public void testConfigureFailure() {
        Exception ex = new Exception("error occurred");
        AsyncMockStubber.callFailureWith(ex).when(mockSynapseClient).getUserProfile(anyString(), any(AsyncCallback.class));
        presenter.configure();
        verify(mockView).clear();
        verify(mockSubscriptionListWidget).configure();
        verify(mockSynAlert).handleException(ex);
    }

    @Test
    public void testAsWidget() {
        presenter.asWidget();
        verify(mockView).asWidget();
    }

    @Test
    public void testConfigureAnonymousSWC2943() {
        // used to result in NPE before fix for SWC-2943
        when(mockAuthenticationController.isLoggedIn()).thenReturn(false);
        when(mockAuthenticationController.getCurrentUserSessionData()).thenReturn(null);
        presenter.configure();
        verify(mockView).clear();
    }

    @Test
    public void testConfirmAPIKeyChange() {
        presenter.changeApiKey();
        ArgumentCaptor<ConfirmCallback> captor = ArgumentCaptor.forClass(ConfirmCallback.class);
        verify(mockView).showConfirm(anyString(), captor.capture());
        ConfirmCallback callback = captor.getValue();
        // test not confirmed (user clicked cancel)
        callback.callback(false);
        verify(mockSynapseClient, never()).deleteApiKey(any(AsyncCallback.class));
        verify(mockView, never()).setApiKey(APIKEY2);
        callback.callback(true);
        verify(mockSynapseClient).deleteApiKey(any(AsyncCallback.class));
        verify(mockView).setApiKey(APIKEY2);
    }

    @Test
    public void testAPIKeyChangeConfirmedFailure() {
        Exception e = new Exception();
        AsyncMockStubber.callFailureWith(e).when(mockSynapseClient).deleteApiKey(any(AsyncCallback.class));
        presenter.changeApiKeyPostConfirmation();
        verify(mockSynapseClient).deleteApiKey(any(AsyncCallback.class));
        verify(mockSynAlert).handleException(e);
    }

    @Test
    public void testChangePasswordCurrentPasswordFailure() {
        when(mockView.getCurrentPasswordField()).thenReturn("");
        presenter.changePassword();
        verify(mockView).getCurrentPasswordField();
        verify(mockView).getPassword1Field();
        verify(mockView).getPassword2Field();
        verify(mockSynAlert).showError(DisplayConstants.ERROR_ALL_FIELDS_REQUIRED);
        verify(mockView).setCurrentPasswordInError(true);
    }

    @Test
    public void testChangePasswordPassword1Failure() {
        when(mockView.getCurrentPasswordField()).thenReturn(password);
        when(mockView.getPassword1Field()).thenReturn("");
        presenter.changePassword();
        verify(mockView).getCurrentPasswordField();
        verify(mockView).getPassword1Field();
        verify(mockView).getPassword2Field();
        verify(mockSynAlert).showError(DisplayConstants.ERROR_ALL_FIELDS_REQUIRED);
        verify(mockView).setPassword1InError(true);
    }

    @Test
    public void testChangePasswordPassword2Failure() {
        // empty second password
        when(mockView.getCurrentPasswordField()).thenReturn(password);
        when(mockView.getPassword1Field()).thenReturn(newPassword);
        when(mockView.getPassword2Field()).thenReturn("");
        presenter.changePassword();
        verify(mockView).getCurrentPasswordField();
        verify(mockView).getPassword1Field();
        verify(mockView).getPassword2Field();
        verify(mockSynAlert).showError(DisplayConstants.ERROR_ALL_FIELDS_REQUIRED);
        verify(mockView).setPassword2InError(true);
        // unmatching second password
        Mockito.reset(mockView);
        when(mockView.getCurrentPasswordField()).thenReturn(password);
        when(mockView.getPassword1Field()).thenReturn(newPassword);
        when(mockView.getPassword2Field()).thenReturn(newPassword + "abc");
        presenter.changePassword();
        verify(mockView).getCurrentPasswordField();
        verify(mockView).getPassword1Field();
        verify(mockView).getPassword2Field();
        verify(mockSynAlert).showError(DisplayConstants.PASSWORDS_MISMATCH);
        verify(mockView).setPassword2InError(true);
    }

    @Test
    public void testChangePasswordPasswordSuccess() {
        AsyncMockStubber.callSuccessWith(testUser).when(mockAuthenticationController).loginUser(eq(username), eq(password), any(AsyncCallback.class));
        when(mockView.getCurrentPasswordField()).thenReturn(password);
        when(mockView.getPassword1Field()).thenReturn(newPassword);
        when(mockView.getPassword2Field()).thenReturn(newPassword);
        presenter.changePassword();
        verify(mockView).getCurrentPasswordField();
        verify(mockView).getPassword1Field();
        verify(mockView).getPassword2Field();
        verify(mockView).setChangePasswordEnabled(false);
        verify(mockUserService).changePassword(anyString(), anyString(), any(AsyncCallback.class));
    }

    @Test
    public void testClearPasswordErrors() {
        presenter.clearPasswordErrors();
        verify(mockSynAlert).clear();
        verify(mockView).setCurrentPasswordInError(false);
        verify(mockView).setPassword1InError(false);
        verify(mockView).setPassword2InError(false);
    }
}
package com.alibaba.ttl; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.WeakHashMap; import java.util.concurrent.Callable; import java.util.function.Supplier; import java.util.logging.Level; import java.util.logging.Logger; /** * {@link TransmittableThreadLocal} can transmit value from the thread of submitting task to the thread of executing task. * <p> * Note: {@link TransmittableThreadLocal} extends {@link java.lang.InheritableThreadLocal}, * so {@link TransmittableThreadLocal} first is a {@link java.lang.InheritableThreadLocal}. * * @author Jerry Lee (oldratlee at gmail dot com) * @see TtlRunnable * @see TtlCallable * @since 0.10.0 */ public class TransmittableThreadLocal<T> extends InheritableThreadLocal<T> { private static final Logger logger = Logger.getLogger(TransmittableThreadLocal.class.getName()); /** * Computes the value for this transmittable thread-local variable * as a function of the source thread's value at the time the task * Object is created. This method is called from {@link TtlRunnable} or * {@link TtlCallable} when it create, before the task is started. * <p> * This method merely returns reference of its source thread value, and should be overridden * if a different behavior is desired. * * @since 1.0.0 */ protected T copy(T parentValue) { return parentValue; } /** * Callback method before task object({@link TtlRunnable}/{@link TtlCallable}) execute. * <p> * Default behavior is do nothing, and should be overridden * if a different behavior is desired. * <p> * Do not throw any exception, just ignored. * * @since 1.2.0 */ protected void beforeExecute() { } /** * Callback method after task object({@link TtlRunnable}/{@link TtlCallable}) execute. * <p> * Default behavior is do nothing, and should be overridden * if a different behavior is desired. * <p> * Do not throw any exception, just ignored. 
* * @since 1.2.0 */ protected void afterExecute() { } @Override public final T get() { T value = super.get(); if (null != value) { addValue(); } return value; } @Override public final void set(T value) { super.set(value); if (null == value) { // may set null to remove value removeValue(); } else { addValue(); } } @Override public final void remove() { removeValue(); super.remove(); } private void superRemove() { super.remove(); } private T copyValue() { return copy(get()); } // Note about holder: // 1. The value of holder is type Map<TransmittableThreadLocal<?>, ?> (WeakHashMap implementation), // but it is used as *set*. // 2. WeakHashMap support null value. private static InheritableThreadLocal<Map<TransmittableThreadLocal<?>, ?>> holder = new InheritableThreadLocal<Map<TransmittableThreadLocal<?>, ?>>() { @Override protected Map<TransmittableThreadLocal<?>, ?> initialValue() { return new WeakHashMap<TransmittableThreadLocal<?>, Object>(); } @Override protected Map<TransmittableThreadLocal<?>, ?> childValue(Map<TransmittableThreadLocal<?>, ?> parentValue) { return new WeakHashMap<TransmittableThreadLocal<?>, Object>(parentValue); } }; private void addValue() { if (!holder.get().containsKey(this)) { holder.get().put(this, null); // WeakHashMap supports null value. } } private void removeValue() { holder.get().remove(this); } private static void doExecuteCallback(boolean isBefore) { for (Map.Entry<TransmittableThreadLocal<?>, ?> entry : holder.get().entrySet()) { TransmittableThreadLocal<?> threadLocal = entry.getKey(); try { if (isBefore) { threadLocal.beforeExecute(); } else { threadLocal.afterExecute(); } } catch (Throwable t) { if (logger.isLoggable(Level.WARNING)) { logger.log(Level.WARNING, "TTL exception when " + (isBefore ? "beforeExecute" : "afterExecute") + ", cause: " + t.toString(), t); } } } } /** * Debug only method! 
*/ static void dump(@Nullable String title) { if (title != null && title.length() > 0) { System.out.printf("Start TransmittableThreadLocal[%s] Dump...\n", title); } else { System.out.println("Start TransmittableThreadLocal Dump..."); } for (Map.Entry<TransmittableThreadLocal<?>, ?> entry : holder.get().entrySet()) { final TransmittableThreadLocal<?> key = entry.getKey(); System.out.println(key.get()); } System.out.println("TransmittableThreadLocal Dump end!"); } /** * Debug only method! */ static void dump() { dump(null); } /** * {@link Transmitter} transmit all {@link TransmittableThreadLocal} values of current thread to * other thread by static method {@link #capture()} =&gt; {@link #replay(Object)} =&gt; {@link #restore(Object)} (aka {@code CRR} operation). * <p> * {@link Transmitter} is <b><i>internal</i></b> manipulation api for <b><i>framework/middleware integration</i></b>; * In general, you will <b><i>never</i></b> use it in the <i>biz/application code</i>! * <p> * Below is the example code: * * <pre><code> * /////////////////////////////////////////////////////////////////////////// * // in thread A, capture all TransmittableThreadLocal values of thread A * /////////////////////////////////////////////////////////////////////////// * * Object captured = Transmitter.capture(); // (1) * * /////////////////////////////////////////////////////////////////////////// * // in thread B * /////////////////////////////////////////////////////////////////////////// * * // replay all TransmittableThreadLocal values from thread A * Object backup = Transmitter.replay(captured); // (2) * try { * // your biz logic, run with the TransmittableThreadLocal values of thread B * System.out.println("Hello"); * // ... * return "World"; * } finally { * // restore the TransmittableThreadLocal of thread B when replay * Transmitter.restore(backup); (3) * } * </code></pre> * <p> * see the implementation code of {@link TtlRunnable} and {@link TtlCallable} for more actual code sample. 
* <hr> * Of course, {@link #replay(Object)} and {@link #restore(Object)} operation can be simplified * by util methods {@link #runCallableWithCaptured(Object, Callable)} or {@link #runSupplierWithCaptured(Object, Supplier)} * and the adorable {@code Java 8 lambda syntax}. * <p> * Below is the example code: * * <pre><code> * /////////////////////////////////////////////////////////////////////////// * // in thread A, capture all TransmittableThreadLocal values of thread A * /////////////////////////////////////////////////////////////////////////// * * Object captured = Transmitter.capture(); // (1) * * /////////////////////////////////////////////////////////////////////////// * // in thread B * /////////////////////////////////////////////////////////////////////////// * * String result = runSupplierWithCaptured(captured, () -&gt; { * // your biz logic, run with the TransmittableThreadLocal values of thread A * System.out.println("Hello"); * ... * return "World"; * }); // (2) + (3) * </code></pre> * <p> * The reason of providing 2 util methods is the different {@code throws Exception} type so as to satisfy your biz logic({@code lambda}): * <ol> * <li>{@link #runCallableWithCaptured(Object, Callable)}: {@code throws Exception}</li> * <li>{@link #runSupplierWithCaptured(Object, Supplier)}: No {@code throws}</li> * </ol> * <p> * If you need the different {@code throws Exception} type, * you can define your own util method(function interface({@code lambda})) with your own {@code throws Exception} type. * * @author Yang Fang (snoop dot fy at gmail dot com) * @author Jerry Lee (oldratlee at gmail dot com) * @see TtlRunnable * @see TtlCallable * @since 2.3.0 */ public static class Transmitter { /** * Capture all {@link TransmittableThreadLocal} values in current thread. 
* * @return the captured {@link TransmittableThreadLocal} values * @since 2.3.0 */ @Nonnull public static Object capture() { Map<TransmittableThreadLocal<?>, Object> captured = new HashMap<TransmittableThreadLocal<?>, Object>(); for (TransmittableThreadLocal<?> threadLocal : holder.get().keySet()) { captured.put(threadLocal, threadLocal.copyValue()); } return captured; } /** * Replay the captured {@link TransmittableThreadLocal} values from {@link #capture()}, * and return the backup {@link TransmittableThreadLocal} values in current thread before replay. * * @param captured captured {@link TransmittableThreadLocal} values from other thread from {@link #capture()} * @return the backup {@link TransmittableThreadLocal} values before replay * @see #capture() * @since 2.3.0 */ @Nonnull public static Object replay(@Nonnull Object captured) { @SuppressWarnings("unchecked") Map<TransmittableThreadLocal<?>, Object> capturedMap = (Map<TransmittableThreadLocal<?>, Object>) captured; Map<TransmittableThreadLocal<?>, Object> backup = new HashMap<TransmittableThreadLocal<?>, Object>(); for (Iterator<? extends Map.Entry<TransmittableThreadLocal<?>, ?>> iterator = holder.get().entrySet().iterator(); iterator.hasNext(); ) { Map.Entry<TransmittableThreadLocal<?>, ?> next = iterator.next(); TransmittableThreadLocal<?> threadLocal = next.getKey(); // backup backup.put(threadLocal, threadLocal.get()); // clear the TTL values that is not in captured // avoid the extra TTL values after replay when run task if (!capturedMap.containsKey(threadLocal)) { iterator.remove(); threadLocal.superRemove(); } } // set values to captured TTL setTtlValuesTo(capturedMap); // call beforeExecute callback doExecuteCallback(true); return backup; } /** * Restore the backup {@link TransmittableThreadLocal} values from {@link Transmitter#replay(Object)}. 
* * @param backup the backup {@link TransmittableThreadLocal} values from {@link Transmitter#replay(Object)} * @see #replay(Object) * @since 2.3.0 */ public static void restore(@Nonnull Object backup) { @SuppressWarnings("unchecked") Map<TransmittableThreadLocal<?>, Object> backupMap = (Map<TransmittableThreadLocal<?>, Object>) backup; // call afterExecute callback doExecuteCallback(false); for (Iterator<? extends Map.Entry<TransmittableThreadLocal<?>, ?>> iterator = holder.get().entrySet().iterator(); iterator.hasNext(); ) { Map.Entry<TransmittableThreadLocal<?>, ?> next = iterator.next(); TransmittableThreadLocal<?> threadLocal = next.getKey(); // clear the TTL values that is not in backup // avoid the extra TTL values after restore if (!backupMap.containsKey(threadLocal)) { iterator.remove(); threadLocal.superRemove(); } } // restore TTL values setTtlValuesTo(backupMap); } private static void setTtlValuesTo(@Nonnull Map<TransmittableThreadLocal<?>, Object> ttlValues) { for (Map.Entry<TransmittableThreadLocal<?>, Object> entry : ttlValues.entrySet()) { @SuppressWarnings("unchecked") TransmittableThreadLocal<Object> threadLocal = (TransmittableThreadLocal<Object>) entry.getKey(); threadLocal.set(entry.getValue()); } } /** * Util method for simplifying {@link #replay(Object)} and {@link #restore(Object)} operation. * * @param captured captured {@link TransmittableThreadLocal} values from other thread from {@link #capture()} * @param bizLogic biz logic * @param <R> the return type of biz logic * @return the return value of biz logic * @see #capture() * @see #replay(Object) * @see #restore(Object) * @since 2.3.1 */ public static <R> R runSupplierWithCaptured(@Nonnull Object captured, Supplier<R> bizLogic) { Object backup = replay(captured); try { return bizLogic.get(); } finally { restore(backup); } } /** * Util method for simplifying {@link #replay(Object)} and {@link #restore(Object)} operation. 
* * @param captured captured {@link TransmittableThreadLocal} values from other thread from {@link #capture()} * @param bizLogic biz logic * @param <R> the return type of biz logic * @return the return value of biz logic * @throws Exception exception threw by biz logic * @see #capture() * @see #replay(Object) * @see #restore(Object) * @since 2.3.1 */ public static <R> R runCallableWithCaptured(Object captured, Callable<R> bizLogic) throws Exception { Object backup = replay(captured); try { return bizLogic.call(); } finally { restore(backup); } } private Transmitter() { throw new InstantiationError("Must not instantiate this class"); } } }
package com.hazelcast.stabilizer.coordinator; import com.hazelcast.logging.ILogger; import com.hazelcast.logging.Logger; import com.hazelcast.stabilizer.Utils; import com.hazelcast.stabilizer.coordinator.remoting.AgentClient; import com.hazelcast.stabilizer.coordinator.remoting.AgentsClient; import com.hazelcast.stabilizer.worker.commands.GetOperationCountCommand; import java.text.NumberFormat; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; /** * Responsible for collecting performance metrics from the agents and logging/storing it. */ public class PerformanceMonitor extends Thread { private final ILogger log = Logger.getLogger(PerformanceMonitor.class); private final NumberFormat performanceFormat = NumberFormat.getInstance(Locale.US); private final AgentsClient client; private final Coordinator coordinator; private final ConcurrentMap<AgentClient, Long> operationCountPerAgent = new ConcurrentHashMap<AgentClient, Long>(); private long previousCount = 0; public long previousTime = System.currentTimeMillis(); public PerformanceMonitor(Coordinator coordinator) { this.client = coordinator.agentsClient; this.coordinator = coordinator; } @Override public void run() { for (; ; ) { Utils.sleepSeconds(10); try { checkPerformance(); } catch (Throwable t) { log.severe(t); } } } private void checkPerformance() { GetOperationCountCommand command = new GetOperationCountCommand(); Map<AgentClient, List<Long>> result = client.executeOnAllWorkersDetailed(command); long totalCount = 0; for (Map.Entry<AgentClient, List<Long>> entry : result.entrySet()) { AgentClient agentClient = entry.getKey(); Long countPerAgent = operationCountPerAgent.get(agentClient); if (countPerAgent == null) { countPerAgent = 0l; } for (Long value : entry.getValue()) { if (value != null) { totalCount += value; countPerAgent += value; } } operationCountPerAgent.put(agentClient, countPerAgent); } 
long delta = totalCount - previousCount; long currentMs = System.currentTimeMillis(); long durationMs = currentMs - previousTime; coordinator.performance = (delta * 1000d) / durationMs; coordinator.operationCount = totalCount; previousTime = currentMs; previousCount = totalCount; } public String getDetailedPerformanceInfo(int duration) { long totalOperations = 0; for (Map.Entry<AgentClient, Long> entry : operationCountPerAgent.entrySet()) { totalOperations += entry.getValue(); } StringBuffer sb = new StringBuffer(); sb.append("Total operations executed: "+totalOperations+"\n"); for (Map.Entry<AgentClient, Long> entry : operationCountPerAgent.entrySet()) { AgentClient client = entry.getKey(); long operationCount = entry.getValue(); double percentage = 100* (operationCount * 1.0d) / totalOperations; double performance = (operationCount * 1.0d) / duration; sb.append(" Agent: ") .append(client.getPublicAddress()) .append(" operations: ") .append(performanceFormat.format(operationCount)) .append(" operations/second: ") .append(performanceFormat.format(performance)) .append(" share: ") .append(performanceFormat.format(percentage)) .append(" %\n"); } return sb.toString(); } }
package com.braintreegateway; /** * An Enum representing all of the validation errors from the gateway. */ public enum ValidationErrorCode { ADDRESS_CANNOT_BE_BLANK("81801"), ADDRESS_COMPANY_IS_INVALID("91821"), ADDRESS_COMPANY_IS_TOO_LONG("81802"), ADDRESS_COUNTRY_CODE_ALPHA2_IS_NOT_ACCEPTED("91814"), ADDRESS_COUNTRY_CODE_ALPHA3_IS_NOT_ACCEPTED("91816"), ADDRESS_COUNTRY_CODE_NUMERIC_IS_NOT_ACCEPTED("91817"), ADDRESS_COUNTRY_NAME_IS_NOT_ACCEPTED("91803"), ADDRESS_EXTENDED_ADDRESS_IS_INVALID("91823"), ADDRESS_EXTENDED_ADDRESS_IS_TOO_LONG("81804"), ADDRESS_FIRST_NAME_IS_INVALID("91819"), ADDRESS_FIRST_NAME_IS_TOO_LONG("81805"), ADDRESS_INCONSISTENT_COUNTRY("91815"), ADDRESS_IS_INVALID("91828"), ADDRESS_LAST_NAME_IS_INVALID("91820"), ADDRESS_LAST_NAME_IS_TOO_LONG("81806"), ADDRESS_LOCALITY_IS_INVALID("91824"), ADDRESS_LOCALITY_IS_TOO_LONG("81807"), ADDRESS_POSTAL_CODE_INVALID_CHARACTERS("81813"), ADDRESS_POSTAL_CODE_IS_INVALID("91826"), ADDRESS_POSTAL_CODE_IS_REQUIRED("81808"), ADDRESS_POSTAL_CODE_IS_REQUIRED_FOR_CARD_BRAND_AND_PROCESSOR("81828"), ADDRESS_POSTAL_CODE_IS_TOO_LONG("81809"), ADDRESS_REGION_IS_INVALID("91825"), ADDRESS_REGION_IS_TOO_LONG("81810"), ADDRESS_STATE_IS_INVALID_FOR_SELLER_PROTECTION("81827"), ADDRESS_STREET_ADDRESS_IS_INVALID("91822"), ADDRESS_STREET_ADDRESS_IS_REQUIRED("81811"), ADDRESS_STREET_ADDRESS_IS_TOO_LONG("81812"), ADDRESS_TOO_MANY_ADDRESSES_PER_CUSTOMER("91818"), APPLE_PAY_CARDS_ARE_NOT_ACCEPTED("83501"), APPLE_PAY_CUSTOMER_ID_IS_REQUIRED_FOR_VAULTING("83502"), APPLE_PAY_TOKEN_IS_IN_USE("93503"), APPLE_PAY_PAYMENT_METHOD_NONCE_CONSUMED("93504"), APPLE_PAY_PAYMENT_METHOD_NONCE_UNKNOWN("93505"), APPLE_PAY_PAYMENT_METHOD_NONCE_LOCKED("93506"), APPLE_PAY_PAYMENT_METHOD_NONCE_CARD_TYPE_IS_NOT_ACCEPTED("83518"), APPLE_PAY_CANNOT_UPDATE_APPLE_PAY_CARD_USING_PAYMENT_METHOD_NONCE("93507"), APPLE_PAY_NUMBER_IS_REQUIRED("93508"), APPLE_PAY_EXPIRATION_MONTH_IS_REQUIRED("93509"), APPLE_PAY_EXPIRATION_YEAR_IS_REQUIRED("93510"), 
APPLE_PAY_CRYPTOGRAM_IS_REQUIRED("93511"), APPLE_PAY_DECRYPTION_FAILED("83512"), APPLE_PAY_DISABLED("93513"), APPLE_PAY_MERCHANT_NOT_CONFIGURED("93514"), APPLE_PAY_MERCHANT_KEYS_ALREADY_CONFIGURED("93515"), APPLE_PAY_MERCHANT_KEYS_NOT_CONFIGURED("93516"), APPLE_PAY_CERTIFICATE_INVALID("93517"), APPLE_PAY_CERTIFICATE_MISMATCH("93519"), APPLE_PAY_INVALID_TOKEN("83520"), APPLE_PAY_PRIVATE_KEY_MISMATCH("93521"), APPLE_PAY_KEY_MISMATCH_STORING_CERTIFICATE("93522"), AUTHORIZATION_FINGERPRINT_MISSING_FINGERPRINT("93201"), AUTHORIZATION_FINGERPRINT_INVALID_FORMAT("93202"), AUTHORIZATION_FINGERPRINT_SIGNATURE_REVOKED("93203"), AUTHORIZATION_FINGERPRINT_INVALID_CREATED_AT("93204"), AUTHORIZATION_FINGERPRINT_INVALID_PUBLIC_KEY("93205"), AUTHORIZATION_FINGERPRINT_INVALID_SIGNATURE("93206"), AUTHORIZATION_FINGERPRINT_OPTIONS_NOT_ALLOWED_WITHOUT_CUSTOMER("93207"), CLIENT_TOKEN_MAKE_DEFAULT_REQUIRES_CUSTOMER_ID("92801"), CLIENT_TOKEN_VERIFY_CARD_REQUIRES_CUSTOMER_ID("92802"), CLIENT_TOKEN_FAIL_ON_DUPLICATE_PAYMENT_METHOD_REQUIRES_CUSTOMER_ID("92803"), CLIENT_TOKEN_CUSTOMER_DOES_NOT_EXIST("92804"), CLIENT_TOKEN_PROXY_MERCHANT_DOES_NOT_EXIST("92805"), CLIENT_TOKEN_UNSUPPORTED_VERSION("92806"), CLIENT_TOKEN_MERCHANT_ACCOUNT_DOES_NOT_EXIST("92807"), CREDIT_CARD_BILLING_ADDRESS_CONFLICT("91701"), CREDIT_CARD_BILLING_ADDRESS_FORMAT_IS_INVALID("91744"), CREDIT_CARD_BILLING_ADDRESS_ID_IS_INVALID("91702"), CREDIT_CARD_CANNOT_UPDATE_CARD_USING_PAYMENT_METHOD_NONCE("91735"), CREDIT_CARD_CARDHOLDER_NAME_IS_TOO_LONG("81723"), CREDIT_CARD_CREDIT_CARD_TYPE_IS_NOT_ACCEPTED("81703"), CREDIT_CARD_CREDIT_CARD_TYPE_IS_NOT_ACCEPTED_BY_SUBSCRIPTION_MERCHANT_ACCOUNT("81718"), CREDIT_CARD_CUSTOMER_ID_IS_INVALID("91705"), CREDIT_CARD_CUSTOMER_ID_IS_REQUIRED("91704"), CREDIT_CARD_CVV_IS_INVALID("81707"), CREDIT_CARD_CVV_IS_REQUIRED("81706"), CREDIT_CARD_CVV_VERIFICATION_FAILED("81736"), CREDIT_CARD_DUPLICATE_CARD_EXISTS("81724"), CREDIT_CARD_EXPIRATION_DATE_CONFLICT("91708"), 
CREDIT_CARD_EXPIRATION_DATE_IS_INVALID("81710"), CREDIT_CARD_EXPIRATION_DATE_IS_REQUIRED("81709"), CREDIT_CARD_EXPIRATION_DATE_YEAR_IS_INVALID("81711"), CREDIT_CARD_EXPIRATION_MONTH_IS_INVALID("81712"), CREDIT_CARD_EXPIRATION_YEAR_IS_INVALID("81713"), CREDIT_CARD_INVALID_PARAMS_FOR_CREDIT_CARD_UPDATE("91745"), CREDIT_CARD_INVALID_VENMO_SDK_PAYMENT_METHOD_CODE("91727"), CREDIT_CARD_NUMBER_HAS_INVALID_LENGTH("81716"), CREDIT_CARD_NUMBER_IS_INVALID("81715"), CREDIT_CARD_NUMBER_IS_PROHIBITED("81750"), CREDIT_CARD_NUMBER_IS_REQUIRED("81714"), CREDIT_CARD_NUMBER_LENGTH_IS_INVALID("81716"), CREDIT_CARD_NUMBER_MUST_BE_TEST_NUMBER("81717"), CREDIT_CARD_OPTIONS_UPDATE_EXISTING_TOKEN_IS_INVALID("91723"), CREDIT_CARD_OPTIONS_UPDATE_EXISTING_TOKEN_NOT_ALLOWED("91729"), CREDIT_CARD_OPTIONS_VERIFICATION_AMOUNT_CANNOT_BE_NEGATIVE("91739"), CREDIT_CARD_OPTIONS_VERIFICATION_AMOUNT_FORMAT_IS_INVALID("91740"), CREDIT_CARD_OPTIONS_VERIFICATION_AMOUNT_IS_TOO_LARGE("91752"), CREDIT_CARD_OPTIONS_VERIFICATION_AMOUNT_NOT_SUPPORTED_BY_PROCESSOR("91741"), CREDIT_CARD_OPTIONS_VERIFICATION_MERCHANT_ACCOUNT_ID_IS_INVALID("91728"), CREDIT_CARD_OPTIONS_VERIFICATION_MERCHANT_ACCOUNT_IS_FORBIDDEN("91743"), CREDIT_CARD_OPTIONS_VERIFICATION_MERCHANT_ACCOUNT_IS_SUSPENDED("91742"), CREDIT_CARD_OPTIONS_VERIFICATION_MERCHANT_ACCOUNT_CANNOT_BE_SUB_MERCHANT_ACCOUNT("91755"), CREDIT_CARD_OPTIONS_VERIFICATION_ACCOUNT_TYPE_IS_INVALID("91757"), CREDIT_CARD_OPTIONS_VERIFICATION_ACCOUNT_TYPE_NOT_SUPPORTED("91758"), CREDIT_CARD_PAYMENT_METHOD_CONFLICT("81725"), CREDIT_CARD_PAYMENT_METHOD_IS_NOT_A_CREDIT_CARD("91738"), CREDIT_CARD_PAYMENT_METHOD_NONCE_CARD_TYPE_IS_NOT_ACCEPTED("91734"), CREDIT_CARD_PAYMENT_METHOD_NONCE_CONSUMED("91731"), CREDIT_CARD_PAYMENT_METHOD_NONCE_LOCKED("91733"), CREDIT_CARD_PAYMENT_METHOD_NONCE_UNKNOWN("91732"), CREDIT_CARD_POSTAL_CODE_VERIFICATION_FAILED("81737"), CREDIT_CARD_TOKEN_FORMAT_IS_INVALID("91718"), CREDIT_CARD_TOKEN_INVALID("91718"), CREDIT_CARD_TOKEN_IS_IN_USE("91719"), 
CREDIT_CARD_TOKEN_IS_NOT_ALLOWED("91721"), CREDIT_CARD_TOKEN_IS_REQUIRED("91722"), CREDIT_CARD_TOKEN_IS_TOO_LONG("91720"), CREDIT_CARD_VENMO_SDK_PAYMENT_METHOD_CODE_CARD_TYPE_IS_NOT_ACCEPTED("91726"), CREDIT_CARD_VERIFICATION_NOT_SUPPORTED_ON_THIS_MERCHANT_ACCOUNT("91730"), CUSTOMER_COMPANY_IS_TOO_LONG("81601"), CUSTOMER_CUSTOM_FIELD_IS_INVALID("91602"), CUSTOMER_CUSTOM_FIELD_IS_TOO_LONG("81603"), CUSTOMER_EMAIL_FORMAT_IS_INVALID("81604"), CUSTOMER_EMAIL_IS_INVALID("81604"), CUSTOMER_EMAIL_IS_REQUIRED("81606"), CUSTOMER_EMAIL_IS_TOO_LONG("81605"), CUSTOMER_FAX_IS_TOO_LONG("81607"), CUSTOMER_FIRST_NAME_IS_TOO_LONG("81608"), CUSTOMER_ID_IS_INVAILD("91610"), //Deprecated CUSTOMER_ID_IS_INVALID("91610"), //Deprecated CUSTOMER_ID_IS_IN_USE("91609"), CUSTOMER_ID_IS_NOT_ALLOWED("91611"), CUSTOMER_ID_IS_REQUIRED("91613"), CUSTOMER_ID_IS_TOO_LONG("91612"), CUSTOMER_LAST_NAME_IS_TOO_LONG("81613"), CUSTOMER_PHONE_IS_TOO_LONG("81614"), CUSTOMER_VAULTED_PAYMENT_INSTRUMENT_NONCE_BELONGS_TO_DIFFERENT_CUSTOMER("91617"), CUSTOMER_WEBSITE_FORMAT_IS_INVALID("81616"), CUSTOMER_WEBSITE_IS_INVALID("81616"), CUSTOMER_WEBSITE_IS_TOO_LONG("81615"), DESCRIPTOR_DYNAMIC_DESCRIPTORS_DISABLED("92203"), DESCRIPTOR_INTERNATIONAL_NAME_FORMAT_IS_INVALID("92204"), DESCRIPTOR_INTERNATIONAL_PHONE_FORMAT_IS_INVALID("92205"), DESCRIPTOR_NAME_FORMAT_IS_INVALID("92201"), DESCRIPTOR_PHONE_FORMAT_IS_INVALID("92202"), DESCRIPTOR_URL_FORMAT_IS_INVALID("92206"), DISPUTE_CAN_ONLY_ADD_EVIDENCE_TO_OPEN_DISPUTE("95701"), DISPUTE_CAN_ONLY_REMOVE_EVIDENCE_FROM_OPEN_DISPUTE("95702"), DISPUTE_CAN_ONLY_ADD_EVIDENCE_DOCUMENT_TO_DISPUTE("95703"), DISPUTE_CAN_ONLY_ACCEPT_OPEN_DISPUTE("95704"), DISPUTE_CAN_ONLY_FINALIZE_OPEN_DISPUTE("95705"), DISPUTE_CAN_ONLY_CREATE_EVIDENCE_WITH_VALID_CATEGORY("95706"), DISPUTE_EVIDENCE_CONTENT_DATE_INVALID("95707"), DISPUTE_EVIDENCE_CONTENT_TOO_LONG("95708"), DISPUTE_EVIDENCE_CONTENT_ARN_TOO_LONG("95709"), DISPUTE_EVIDENCE_CONTENT_PHONE_TOO_LONG("95710"), 
DISPUTE_EVIDENCE_CATEGORY_TEXT_ONLY("95711"), DISPUTE_EVIDENCE_CATEGORY_DOCUMENT_ONLY("95712"), DISPUTE_EVIDENCE_CATEGORY_NOT_FOR_REASON_CODE("95713"), DISPUTE_EVIDENCE_CATEGORY_DUPLICATE("95714"), DISPUTE_EVIDENCE_CONTENT_EMAIL_INVALID("95715"), DISPUTE_DIGITAL_GOODS_MISSING_EVIDENCE("95720"), DISPUTE_DIGITAL_GOODS_MISSING_DOWNLOAD_DATE("95721"), DISPUTE_NON_DISPUTED_PRIOR_TRANSACTION_EVIDENCE_MISSING_ARN("95722"), DISPUTE_NON_DISPUTED_PRIOR_TRANSACTION_EVIDENCE_MISSING_DATE("95723"), DISPUTE_RECURRING_TRANSACTION_EVIDENCE_MISSING_DATE("95724"), DISPUTE_RECURRING_TRANSACTION_EVIDENCE_MISSING_ARN("95725"), DISPUTE_VALID_EVIDENCE_REQUIRED_TO_FINALIZE("95726"), FAILED_AUTH_ADJUSTMENT_ALLOW_RETRY("95603"), FAILED_AUTH_ADJUSTMENT_HARD_DECLINE("95602"), FINAL_AUTH_SUBMIT_FOR_SETTLEMENT_FOR_DIFFERENT_AMOUNT("95601"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_ARRIVAL_AIRPORT_CODE_IS_TOO_LONG("96301"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_ARRIVAL_TIME_FORMAT_IS_INVALID("96302"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_CARRIER_CODE_IS_TOO_LONG("96303"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_CONJUNCTION_TICKET_IS_TOO_LONG("96304"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_COUPON_NUMBER_IS_TOO_LONG("96305"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_DEPARTURE_AIRPORT_CODE_IS_TOO_LONG("96306"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_DEPARTURE_TIME_FORMAT_IS_INVALID("96307"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_EXCHANGE_TICKET_IS_TOO_LONG("96308"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FARE_AMOUNT_CANNOT_BE_NEGATIVE("96309"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FARE_AMOUNT_FORMAT_IS_INVALID("96310"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FARE_AMOUNT_IS_TOO_LARGE("96311"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FARE_BASIS_CODE_IS_TOO_LONG("96312"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FEE_AMOUNT_CANNOT_BE_NEGATIVE("96313"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FEE_AMOUNT_FORMAT_IS_INVALID("96314"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_FEE_AMOUNT_IS_TOO_LARGE("96315"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_SERVICE_CLASS_IS_TOO_LONG("96316"), 
INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_TAX_AMOUNT_CANNOT_BE_NEGATIVE("96317"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_TAX_AMOUNT_FORMAT_IS_INVALID("96318"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_TAX_AMOUNT_IS_TOO_LARGE("96319"), INDUSTRY_DATA_LEG_TRAVEL_FLIGHT_TICKET_NUMBER_IS_TOO_LONG("96320"), INDUSTRY_DATA_INDUSTRY_TYPE_IS_INVALID("93401"), INDUSTRY_DATA_LODGING_EMPTY_DATA("93402"), INDUSTRY_DATA_LODGING_FOLIO_NUMBER_IS_INVALID("93403"), INDUSTRY_DATA_LODGING_CHECK_IN_DATE_IS_INVALID("93404"), INDUSTRY_DATA_LODGING_CHECK_OUT_DATE_IS_INVALID("93405"), INDUSTRY_DATA_LODGING_CHECK_OUT_DATE_MUST_FOLLOW_CHECK_IN_DATE("93406"), INDUSTRY_DATA_LODGING_UNKNOWN_DATA_FIELD("93407"), INDUSTRY_DATA_TRAVEL_CRUISE_EMPTY_DATA("93408"), INDUSTRY_DATA_TRAVEL_CRUISE_UNKNOWN_DATA_FIELD("93409"), INDUSTRY_DATA_TRAVEL_CRUISE_TRAVEL_PACKAGE_IS_INVALID("93410"), INDUSTRY_DATA_TRAVEL_CRUISE_DEPARTURE_DATE_IS_INVALID("93411"), INDUSTRY_DATA_TRAVEL_CRUISE_LODGING_CHECK_IN_DATE_IS_INVALID("93412"), INDUSTRY_DATA_TRAVEL_CRUISE_LODGING_CHECKOUT_DATE_IS_INVALID("93413"), INDUSTRY_DATA_TRAVEL_FLIGHT_EMPTY_DATA("93414"), INDUSTRY_DATA_TRAVEL_FLIGHT_UNKNOWN_DATA_FIELD("93415"), INDUSTRY_DATA_TRAVEL_FLIGHT_CUSTOMER_CODE_IS_TOO_LONG("93416"), INDUSTRY_DATA_TRAVEL_FLIGHT_FARE_AMOUNT_CANNOT_BE_NEGATIVE("93417"), INDUSTRY_DATA_TRAVEL_FLIGHT_FARE_AMOUNT_FORMAT_IS_INVALID("93418"), INDUSTRY_DATA_TRAVEL_FLIGHT_FARE_AMOUNT_IS_TOO_LARGE("93419"), INDUSTRY_DATA_TRAVEL_FLIGHT_FEE_AMOUNT_CANNOT_BE_NEGATIVE("93420"), INDUSTRY_DATA_TRAVEL_FLIGHT_FEE_AMOUNT_FORMAT_IS_INVALID("93421"), INDUSTRY_DATA_TRAVEL_FLIGHT_FEE_AMOUNT_IS_TOO_LARGE("93422"), INDUSTRY_DATA_TRAVEL_FLIGHT_ISSUED_DATE_FORMAT_IS_INVALID("93423"), INDUSTRY_DATA_TRAVEL_FLIGHT_ISSUING_CARRIER_CODE_IS_TOO_LONG("93424"), INDUSTRY_DATA_TRAVEL_FLIGHT_PASSENGER_MIDDLE_INITIAL_IS_TOO_LONG("93425"), INDUSTRY_DATA_TRAVEL_FLIGHT_RESTRICTED_TICKET_IS_REQUIRED("93426"), INDUSTRY_DATA_TRAVEL_FLIGHT_TAX_AMOUNT_CANNOT_BE_NEGATIVE("93427"), 
INDUSTRY_DATA_TRAVEL_FLIGHT_TAX_AMOUNT_FORMAT_IS_INVALID("93428"), INDUSTRY_DATA_TRAVEL_FLIGHT_TAX_AMOUNT_IS_TOO_LARGE("93429"), INDUSTRY_DATA_TRAVEL_FLIGHT_TICKET_NUMBER_IS_TOO_LONG("93430"), INDUSTRY_DATA_TRAVEL_FLIGHT_LEGS_EXPECTED("93431"), INDUSTRY_DATA_TRAVEL_FLIGHT_TOO_MANY_LEGS("93432"), INDUSTRY_DATA_LODGING_ROOM_RATE_MUST_BE_GREATER_THAN_ZERO("93433"), INDUSTRY_DATA_LODGING_ROOM_RATE_FORMAT_IS_INVALID("93434"), INDUSTRY_DATA_LODGING_ROOM_RATE_IS_TOO_LARGE("93435"), INDUSTRY_DATA_LODGING_ROOM_TAX_MUST_BE_GREATER_THAN_ZERO("93436"), INDUSTRY_DATA_LODGING_ROOM_TAX_FORMAT_IS_INVALID("93437"), INDUSTRY_DATA_LODGING_ROOM_TAX_IS_TOO_LARGE("93438"), INDUSTRY_DATA_LODGING_NO_SHOW_INIDICATOR_IS_INVALID("93439"), INDUSTRY_DATA_LODGING_ADVANCED_DEPOSIT_INIDICATOR_IS_INVALID("93440"), INDUSTRY_DATA_LODGING_FIRE_SAFETY_INIDICATOR_IS_INVALID("93441"), INDUSTRY_DATA_LODGING_PROPERTY_PHONE_IS_INVALID("93442"), INDUSTRY_DATA_ADDITIONAL_CHARGE_KIND_IS_INVALID("96601"), INDUSTRY_DATA_ADDITIONAL_CHARGE_KIND_MUST_BE_UNIQUE("96602"), INDUSTRY_DATA_ADDITIONAL_CHARGE_AMOUNT_MUST_BE_GREATER_THAN_ZERO("96603"), INDUSTRY_DATA_ADDITIONAL_CHARGE_AMOUNT_FORMAT_IS_INVALID("96604"), INDUSTRY_DATA_ADDITIONAL_CHARGE_AMOUNT_IS_TOO_LARGE("96605"), INDUSTRY_DATA_ADDITIONAL_CHARGE_AMOUNT_IS_REQUIRED("96606"), TRANSACTION_LINE_ITEM_COMMODITY_CODE_IS_TOO_LONG("95801"), TRANSACTION_LINE_ITEM_DESCRIPTION_IS_TOO_LONG("95803"), TRANSACTION_LINE_ITEM_DISCOUNT_AMOUNT_FORMAT_IS_INVALID("95804"), TRANSACTION_LINE_ITEM_DISCOUNT_AMOUNT_IS_TOO_LARGE("95805"), TRANSACTION_LINE_ITEM_DISCOUNT_AMOUNT_CANNOT_BE_NEGATIVE("95806"), TRANSACTION_LINE_ITEM_DISCOUNT_AMOUNT_MUST_BE_GREATER_THAN_ZERO("95806"), // Deprecated as the amount may be zero. Use TRANSACTION_LINE_ITEM_DISCOUNT_AMOUNT_CANNOT_BE_NEGATIVE. 
TRANSACTION_LINE_ITEM_KIND_IS_INVALID("95807"), TRANSACTION_LINE_ITEM_KIND_IS_REQUIRED("95808"), TRANSACTION_LINE_ITEM_NAME_IS_REQUIRED("95822"), TRANSACTION_LINE_ITEM_NAME_IS_TOO_LONG("95823"), TRANSACTION_LINE_ITEM_PRODUCT_CODE_IS_TOO_LONG("95809"), TRANSACTION_LINE_ITEM_QUANTITY_FORMAT_IS_INVALID("95810"), TRANSACTION_LINE_ITEM_QUANTITY_IS_REQUIRED("95811"), TRANSACTION_LINE_ITEM_QUANTITY_IS_TOO_LARGE("95812"), TRANSACTION_LINE_ITEM_TOTAL_AMOUNT_FORMAT_IS_INVALID("95813"), TRANSACTION_LINE_ITEM_TOTAL_AMOUNT_IS_REQUIRED("95814"), TRANSACTION_LINE_ITEM_TOTAL_AMOUNT_IS_TOO_LARGE("95815"), TRANSACTION_LINE_ITEM_TOTAL_AMOUNT_MUST_BE_GREATER_THAN_ZERO("95816"), TRANSACTION_LINE_ITEM_UNIT_AMOUNT_FORMAT_IS_INVALID("95817"), TRANSACTION_LINE_ITEM_UNIT_AMOUNT_IS_REQUIRED("95818"), TRANSACTION_LINE_ITEM_UNIT_AMOUNT_IS_TOO_LARGE("95819"), TRANSACTION_LINE_ITEM_UNIT_AMOUNT_MUST_BE_GREATER_THAN_ZERO("95820"), TRANSACTION_LINE_ITEM_UNIT_OF_MEASURE_IS_TOO_LONG("95821"), TRANSACTION_LINE_ITEM_UNIT_TAX_AMOUNT_FORMAT_IS_INVALID("95824"), TRANSACTION_LINE_ITEM_UNIT_TAX_AMOUNT_IS_TOO_LARGE("95825"), TRANSACTION_LINE_ITEM_UNIT_TAX_AMOUNT_CANNOT_BE_NEGATIVE("95826"), TRANSACTION_LINE_ITEM_UNIT_TAX_AMOUNT_MUST_BE_GREATER_THAN_ZERO("95826"), // Deprecated as the amount may be zero. Use TRANSACTION_LINE_ITEM_UNIT_TAX_AMOUNT_CANNOT_BE_NEGATIVE. TRANSACTION_LINE_ITEM_TAX_AMOUNT_FORMAT_IS_INVALID("95827"), TRANSACTION_LINE_ITEM_TAX_AMOUNT_IS_TOO_LARGE("95828"), TRANSACTION_LINE_ITEM_TAX_AMOUNT_CANNOT_BE_NEGATIVE("95829"), TRANSACTION_LINE_ITEM_TAX_AMOUNT_MUST_BE_GREATER_THAN_ZERO("95829"), // Deprecated as the amount may be zero. Use TRANSACTION_LINE_ITEM_TAX_AMOUNT_CANNOT_BE_NEGATIVE. 
TRANSACTION_PAYMENT_INSTRUMENT_WITH_EXTERNAL_VAULT_IS_INVALID("915176"), TRANSACTION_EXTERNAL_VAULT_STATUS_WITH_PREVIOUS_NETWORK_TRANSACTION_ID_IS_INVALID("915177"), TRANSACTION_EXTERNAL_VAULT_CARD_TYPE_IS_INVALID("915178"), TRANSACTION_EXTERNAL_VAULT_PREVIOUS_NETWORK_TRANSACTION_ID_IS_INVALID("915179"), OAUTH_INVALID_GRANT("93801"), OAUTH_INVALID_CREDENTIALS("93802"), OAUTH_INVALID_SCOPE("93803"), OAUTH_INVALID_REQUEST("93804"), OAUTH_UNSUPPORTED_GRANT_TYPE("93805"), PAYMENT_METHOD_CUSTOMER_ID_IS_INVALID("93105"), PAYMENT_METHOD_CUSTOMER_ID_IS_REQUIRED("93104"), PAYMENT_METHOD_NONCE_IS_INVALID("93102"), PAYMENT_METHOD_PAYMENT_METHOD_PARAMS_ARE_REQUIRED("93101"), PAYMENT_METHOD_PARAMS_ARE_REQUIRED("93101"), PAYMENT_METHOD_NONCE_IS_REQUIRED("93103"), PAYMENT_METHOD_CANNOT_FORWARD_PAYMENT_METHOD_TYPE("93106"), PAYMENT_METHOD_PAYMENT_METHOD_NONCE_CONSUMED("93107"), PAYMENT_METHOD_PAYMENT_METHOD_NONCE_UNKNOWN("93108"), PAYMENT_METHOD_PAYMENT_METHOD_NONCE_LOCKED("93109"), PAYMENT_METHOD_NO_LONGER_SUPPORTED("93117"), PAYMENT_METHOD_OPTIONS_US_BANK_ACCOUNT_VERIFICATION_METHOD_IS_INVALID("93121"), PAYPAL_ACCOUNT_AUTH_EXPIRED("92911"), PAYPAL_ACCOUNT_CANNOT_HAVE_BOTH_ACCESS_TOKEN_AND_CONSENT_CODE("82903"), PAYPAL_ACCOUNT_CANNOT_HAVE_FUNDING_SOURCE_WITHOUT_ACCESS_TOKEN("92912"), PAYPAL_ACCOUNT_CANNOT_UPDATE_PAYPAL_ACCOUNT_USING_PAYMENT_METHOD_NONCE("92914"), PAYPAL_ACCOUNT_CANNOT_VAULT_ONE_TIME_USE_PAYPAL_ACCOUNT("82902"), PAYPAL_ACCOUNT_CONSENT_CODE_OR_ACCESS_TOKEN_IS_REQUIRED("82901"), PAYPAL_ACCOUNT_CUSTOMER_ID_IS_REQUIRED_FOR_VAULTING("82905"), PAYPAL_ACCOUNT_INVALID_FUNDING_SOURCE_SELECTION("92913"), PAYPAL_ACCOUNT_INVALID_PARAMS_FOR_PAYPAL_ACCOUNT_UPDATE("92915"), PAYPAL_ACCOUNT_PAYMENT_METHOD_NONCE_CONSUMED("92907"), PAYPAL_ACCOUNT_PAYMENT_METHOD_NONCE_LOCKED("92909"), PAYPAL_ACCOUNT_PAYMENT_METHOD_NONCE_UNKNOWN("92908"), PAYPAL_ACCOUNT_PAYPAL_ACCOUNTS_ARE_NOT_ACCEPTED("82904"), PAYPAL_ACCOUNT_PAYPAL_COMMUNICATION_ERROR("92910"), 
PAYPAL_ACCOUNT_TOKEN_IS_IN_USE("92906"), EUROPE_BANK_ACCOUNT_ACCOUNT_HOLDER_NAME_IS_REQUIRED("93003"), EUROPE_BANK_ACCOUNT_BIC_IS_REQUIRED("93002"), EUROPE_BANK_ACCOUNT_IBAN_IS_REQUIRED("93001"), DOCUMENT_UPLOAD_KIND_IS_INVALID("84901"), DOCUMENT_UPLOAD_FILE_IS_TOO_LARGE("84902"), DOCUMENT_UPLOAD_FILE_TYPE_IS_INVALID("84903"), DOCUMENT_UPLOAD_FILE_IS_MALFORMED_OR_ENCRYPTED("84904"), DOCUMENT_UPLOAD_FILE_IS_TOO_LONG("84905"), SEPA_MANDATE_ACCOUNT_HOLDER_NAME_IS_REQUIRED("83301"), SEPA_MANDATE_BIC_INVALID_CHARACTER("83306"), SEPA_MANDATE_BIC_IS_REQUIRED("83302"), SEPA_MANDATE_BIC_LENGTH_IS_INVALID("83307"), SEPA_MANDATE_BIC_UNSUPPORTED_COUNTRY("83308"), SEPA_MANDATE_BILLING_ADDRESS_CONFLICT("93311"), SEPA_MANDATE_BILLING_ADDRESS_ID_IS_INVALID("93312"), SEPA_MANDATE_IBAN_INVALID_CHARACTER("83305"), SEPA_MANDATE_IBAN_INVALID_FORMAT("83310"), SEPA_MANDATE_IBAN_IS_REQUIRED("83303"), SEPA_MANDATE_IBAN_UNSUPPORTED_COUNTRY("83309"), SEPA_MANDATE_TYPE_IS_REQUIRED("93304"), SEPA_MANDATE_TYPE_IS_INVALID("93313"), SETTLEMENT_BATCH_SUMMARY_SETTLEMENT_DATE_IS_INVALID("82302"), SETTLEMENT_BATCH_SUMMARY_SETTLEMENT_DATE_IS_REQUIRED("82301"), SETTLEMENT_BATCH_SUMMARY_CUSTOM_FIELD_IS_INVALID("82303"), SUBSCRIPTION_BILLING_DAY_OF_MONTH_CANNOT_BE_UPDATED("91918"), SUBSCRIPTION_BILLING_DAY_OF_MONTH_IS_INVALID("91914"), SUBSCRIPTION_BILLING_DAY_OF_MONTH_MUST_BE_NUMERIC("91913"), SUBSCRIPTION_CANNOT_ADD_DUPLICATE_ADDON_OR_DISCOUNT("91911"), SUBSCRIPTION_CANNOT_EDIT_CANCELED_SUBSCRIPTION("81901"), SUBSCRIPTION_CANNOT_EDIT_EXPIRED_SUBSCRIPTION("81910"), SUBSCRIPTION_CANNOT_EDIT_PRICE_CHANGING_FIELDS_ON_PAST_DUE_SUBSCRIPTION("91920"), SUBSCRIPTION_FIRST_BILLING_DATE_CANNOT_BE_IN_THE_PAST("91916"), SUBSCRIPTION_FIRST_BILLING_DATE_CANNOT_BE_UPDATED("91919"), SUBSCRIPTION_FIRST_BILLING_DATE_IS_INVALID("91915"), SUBSCRIPTION_ID_IS_IN_USE("81902"), SUBSCRIPTION_INCONSISTENT_NUMBER_OF_BILLING_CYCLES("91908"), SUBSCRIPTION_INCONSISTENT_START_DATE("91917"), 
SUBSCRIPTION_INVALID_REQUEST_FORMAT("91921"), SUBSCRIPTION_MERCHANT_ACCOUNT_DOES_NOT_SUPPORT_INSTRUMENT_TYPE("91930"), SUBSCRIPTION_MERCHANT_ACCOUNT_ID_IS_INVALID("91901"), SUBSCRIPTION_MISMATCH_CURRENCY_ISO_CODE("91923"), SUBSCRIPTION_MODIFICATION_ID_TO_REMOVE_IS_INVALID("92025"), SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_CANNOT_BE_BLANK("91912"), SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_IS_TOO_SMALL("91909"), SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_MUST_BE_GREATER_THAN_ZERO("91907"), SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_MUST_BE_NUMERIC("91906"), SUBSCRIPTION_PAYMENT_METHOD_NONCE_CARD_TYPE_IS_NOT_ACCEPTED("91924"), SUBSCRIPTION_PAYMENT_METHOD_NONCE_INSTRUMENT_TYPE_DOES_NOT_SUPPORT_SUBSCRIPTIONS("91929"), SUBSCRIPTION_PAYMENT_METHOD_NONCE_IS_INVALID("91925"), SUBSCRIPTION_PAYMENT_METHOD_NONCE_NOT_ASSOCIATED_WITH_CUSTOMER("91926"), SUBSCRIPTION_PAYMENT_METHOD_NONCE_UNVAULTED_CARD_IS_NOT_ACCEPTED("91927"), SUBSCRIPTION_PAYMENT_METHOD_TOKEN_CARD_TYPE_IS_NOT_ACCEPTED("91902"), SUBSCRIPTION_PAYMENT_METHOD_TOKEN_INSTRUMENT_TYPE_DOES_NOT_SUPPORT_SUBSCRIPTIONS("91928"), SUBSCRIPTION_PAYMENT_METHOD_TOKEN_IS_INVALID("91903"), SUBSCRIPTION_PAYMENT_METHOD_TOKEN_NOT_ASSOCIATED_WITH_CUSTOMER("91905"), SUBSCRIPTION_PLAN_BILLING_FREQUENCY_CANNOT_BE_UPDATED("91922"), SUBSCRIPTION_PLAN_ID_IS_INVALID("91904"), SUBSCRIPTION_PRICE_CANNOT_BE_BLANK("81903"), SUBSCRIPTION_PRICE_FORMAT_IS_INVALID("81904"), SUBSCRIPTION_PRICE_IS_TOO_LARGE("81923"), SUBSCRIPTION_STATUS_IS_CANCELED("81905"), SUBSCRIPTION_TOKEN_FORMAT_IS_INVALID("81906"), SUBSCRIPTION_TRIAL_DURATION_FORMAT_IS_INVALID("81907"), SUBSCRIPTION_TRIAL_DURATION_IS_REQUIRED("81908"), SUBSCRIPTION_TRIAL_DURATION_UNIT_IS_INVALID("81909"), SUBSCRIPTION_MODIFICATION_AMOUNT_CANNOT_BE_BLANK("92003"), SUBSCRIPTION_MODIFICATION_AMOUNT_IS_INVALID("92002"), SUBSCRIPTION_MODIFICATION_AMOUNT_IS_TOO_LARGE("92023"), SUBSCRIPTION_MODIFICATION_CANNOT_EDIT_MODIFICATIONS_ON_PAST_DUE_SUBSCRIPTION("92022"), 
SUBSCRIPTION_MODIFICATION_CANNOT_UPDATE_AND_REMOVE("92015"), SUBSCRIPTION_MODIFICATION_EXISTING_ID_IS_INCORRECT_KIND("92020"), SUBSCRIPTION_MODIFICATION_EXISTING_ID_IS_INVALID("92011"), SUBSCRIPTION_MODIFICATION_EXISTING_ID_IS_REQUIRED("92012"), SUBSCRIPTION_MODIFICATION_ID_TO_REMOVE_IS_INCORRECT_KIND("92021"), SUBSCRIPTION_MODIFICATION_ID_TO_REMOVE_IS_NOT_PRESENT("92016"), SUBSCRIPTION_MODIFICATION_INCONSISTENT_NUMBER_OF_BILLING_CYCLES("92018"), SUBSCRIPTION_MODIFICATION_INHERITED_FROM_ID_IS_INVALID("92013"), SUBSCRIPTION_MODIFICATION_INHERITED_FROM_ID_IS_REQUIRED("92014"), SUBSCRIPTION_MODIFICATION_MISSING("92024"), SUBSCRIPTION_MODIFICATION_NUMBER_OF_BILLING_CYCLES_CANNOT_BE_BLANK("92017"), SUBSCRIPTION_MODIFICATION_NUMBER_OF_BILLING_CYCLES_IS_INVALID("92005"), SUBSCRIPTION_MODIFICATION_NUMBER_OF_BILLING_CYCLES_MUST_BE_GREATER_THAN_ZERO("92019"), SUBSCRIPTION_MODIFICATION_QUANTITY_CANNOT_BE_BLANK("92004"), SUBSCRIPTION_MODIFICATION_QUANTITY_IS_INVALID("92001"), SUBSCRIPTION_MODIFICATION_QUANTITY_MUST_BE_GREATER_THAN_ZERO("92010"), TRANSACTION_AMOUNT_CANNOT_BE_NEGATIVE("81501"), TRANSACTION_AMOUNT_DOES_NOT_MATCH3_D_SECURE_AMOUNT("91585"), TRANSACTION_AMOUNT_DOES_NOT_MATCH_IDEAL_PAYMENT_AMOUNT("915144"), TRANSACTION_AMOUNT_FORMAT_IS_INVALID("81503"), TRANSACTION_AMOUNT_IS_INVALID("81503"), TRANSACTION_AMOUNT_IS_REQUIRED("81502"), TRANSACTION_AMOUNT_IS_TOO_LARGE("81528"), TRANSACTION_AMOUNT_MUST_BE_GREATER_THAN_ZERO("81531"), TRANSACTION_AMOUNT_NOT_SUPPORTED_BY_PROCESSOR("815193"), TRANSACTION_BILLING_ADDRESS_CONFLICT("91530"), TRANSACTION_CANNOT_BE_VOIDED("91504"), TRANSACTION_CANNOT_CANCEL_RELEASE("91562"), TRANSACTION_CANNOT_CLONE_CREDIT("91543"), TRANSACTION_CANNOT_CLONE_MARKETPLACE_TRANSACTION("915137"), TRANSACTION_CANNOT_CLONE_TRANSACTION_WITH_PAYPAL_ACCOUNT("91573"), TRANSACTION_CANNOT_CLONE_TRANSACTION_WITH_VAULT_CREDIT_CARD("91540"), TRANSACTION_CANNOT_CLONE_UNSUCCESSFUL_TRANSACTION("91542"), TRANSACTION_CANNOT_CLONE_VOICE_AUTHORIZATIONS("91541"), 
TRANSACTION_CANNOT_HOLD_IN_ESCROW("91560"), TRANSACTION_CANNOT_PARTIALLY_REFUND_ESCROWED_TRANSACTION("91563"), TRANSACTION_CANNOT_REFUND_CREDIT("91505"), TRANSACTION_CANNOT_REFUND_SETTLING_TRANSACTION("91574"), TRANSACTION_CANNOT_REFUND_UNLESS_SETTLED("91506"), TRANSACTION_CANNOT_REFUND_WITH_PENDING_MERCHANT_ACCOUNT("91559"), TRANSACTION_CANNOT_REFUND_WITH_SUSPENDED_MERCHANT_ACCOUNT("91538"), TRANSACTION_CANNOT_RELEASE_FROM_ESCROW("91561"), TRANSACTION_CANNOT_SUBMIT_FOR_PARTIAL_SETTLEMENT("915103"), TRANSACTION_CANNOT_SUBMIT_FOR_SETTLEMENT("91507"), TRANSACTION_CANNOT_UPDATE_DETAILS_NOT_SUBMITTED_FOR_SETTLEMENT("915129"), TRANSACTION_CHANNEL_IS_TOO_LONG("91550"), TRANSACTION_CREDIT_CARD_IS_REQUIRED("91508"), TRANSACTION_CUSTOM_FIELD_IS_INVALID("91526"), TRANSACTION_CUSTOM_FIELD_IS_TOO_LONG("81527"), TRANSACTION_CUSTOMER_DEFAULT_PAYMENT_METHOD_CARD_TYPE_IS_NOT_ACCEPTED("81509"), TRANSACTION_CUSTOMER_DOES_NOT_HAVE_CREDIT_CARD("91511"), TRANSACTION_CUSTOMER_ID_IS_INVALID("91510"), TRANSACTION_HAS_ALREADY_BEEN_REFUNDED("91512"), TRANSACTION_IDEAL_PAYMENT_NOT_COMPLETE("815141"), TRANSACTION_TOO_MANY_LINE_ITEMS("915157"), TRANSACTION_LINE_ITEMS_EXPECTED("915158"), TRANSACTION_DISCOUNT_AMOUNT_FORMAT_IS_INVALID("915159"), TRANSACTION_DISCOUNT_AMOUNT_CANNOT_BE_NEGATIVE("915160"), TRANSACTION_DISCOUNT_AMOUNT_IS_TOO_LARGE("915161"), TRANSACTION_SHIPPING_AMOUNT_FORMAT_IS_INVALID("915162"), TRANSACTION_SHIPPING_AMOUNT_CANNOT_BE_NEGATIVE("915163"), TRANSACTION_SHIPPING_AMOUNT_IS_TOO_LARGE("915164"), TRANSACTION_SHIPS_FROM_POSTAL_CODE_IS_TOO_LONG("915165"), TRANSACTION_SHIPS_FROM_POSTAL_CODE_IS_INVALID("915166"), TRANSACTION_SHIPS_FROM_POSTAL_CODE_INVALID_CHARACTERS("915167"), TRANSACTION_IDEAL_PAYMENTS_CANNOT_BE_VAULTED("915150"), TRANSACTION_MERCHANT_ACCOUNT_DOES_NOT_MATCH3_D_SECURE_MERCHANT_ACCOUNT("91584"), TRANSACTION_MERCHANT_ACCOUNT_DOES_NOT_MATCH_IDEAL_PAYMENT_MERCHANT_ACCOUNT("915143"), TRANSACTION_MERCHANT_ACCOUNT_DOES_NOT_SUPPORT_MOTO("91558"), 
TRANSACTION_MERCHANT_ACCOUNT_DOES_NOT_SUPPORT_REFUNDS("91547"), TRANSACTION_MERCHANT_ACCOUNT_ID_DOES_NOT_MATCH_SUBSCRIPTION("915180"), TRANSACTION_MERCHANT_ACCOUNT_ID_IS_INVALID("91513"), TRANSACTION_MERCHANT_ACCOUNT_IS_SUSPENDED("91514"), TRANSACTION_MERCHANT_ACCOUNT_NAME_IS_INVALID("91513"), //Deprecated TRANSACTION_OPTIONS_PAY_PAL_CUSTOM_FIELD_TOO_LONG("91580"), TRANSACTION_OPTIONS_SUBMIT_FOR_SETTLEMENT_IS_REQUIRED_FOR_CLONING("91544"), TRANSACTION_OPTIONS_SUBMIT_FOR_SETTLEMENT_IS_REQUIRED_FOR_PAYPAL_UNILATERAL("91582"), TRANSACTION_OPTIONS_USE_BILLING_FOR_SHIPPING_DISABLED("91572"), TRANSACTION_OPTIONS_VAULT_IS_DISABLED("91525"), TRANSACTION_OPTIONS_CREDIT_CARD_ACCOUNT_TYPE_IS_INVALID("915184"), TRANSACTION_OPTIONS_CREDIT_CARD_ACCOUNT_TYPE_NOT_SUPPORTED("915185"), TRANSACTION_OPTIONS_CREDIT_CARD_ACCOUNT_TYPE_DEBIT_DOES_NOT_SUPPORT_AUTHS("915186"), TRANSACTION_ORDER_ID_DOES_NOT_MATCH_IDEAL_PAYMENT_ORDER_ID("91503"), TRANSACTION_ORDER_ID_IS_REQUIRED_WITH_IDEAL_PAYMENT("91502"), TRANSACTION_ORDER_ID_IS_TOO_LONG("91501"), TRANSACTION_PAY_PAL_AUTH_EXPIRED("91579"), TRANSACTION_PAY_PAL_VAULT_RECORD_MISSING_DATA("91583"), TRANSACTION_PAYMENT_INSTRUMENT_NOT_SUPPORTED_BY_MERCHANT_ACCOUNT("91577"), TRANSACTION_PAYMENT_INSTRUMENT_TYPE_IS_NOT_ACCEPTED("915101"), TRANSACTION_PAYMENT_METHOD_CONFLICT("91515"), TRANSACTION_PAYMENT_METHOD_CONFLICT_WITH_VENMO_SDK("91549"), TRANSACTION_PAYMENT_METHOD_DOES_NOT_BELONG_TO_CUSTOMER("91516"), TRANSACTION_PAYMENT_METHOD_DOES_NOT_BELONG_TO_SUBSCRIPTION("91527"), TRANSACTION_PAYMENT_METHOD_NONCE_CARD_TYPE_IS_NOT_ACCEPTED("91567"), TRANSACTION_PAYMENT_METHOD_NONCE_CONSUMED("91564"), TRANSACTION_PAYMENT_METHOD_NONCE_HAS_NO_VALID_PAYMENT_INSTRUMENT_TYPE("91569"), TRANSACTION_PAYMENT_METHOD_NONCE_LOCKED("91566"), TRANSACTION_PAYMENT_METHOD_NONCE_UNKNOWN("91565"), TRANSACTION_PAYMENT_METHOD_TOKEN_CARD_TYPE_IS_NOT_ACCEPTED("91517"), TRANSACTION_PAYMENT_METHOD_TOKEN_IS_INVALID("91518"), TRANSACTION_PAYPAL_NOT_ENABLED("91576"), 
TRANSACTION_PROCESSOR_AUTHORIZATION_CODE_CANNOT_BE_SET("91519"), TRANSACTION_PROCESSOR_AUTHORIZATION_CODE_IS_INVALID("81520"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_AUTHS("915104"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_CREDITS("91546"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_MOTO_FOR_CARD_TYPE("915195"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_PARTIAL_SETTLEMENT("915102"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_UPDATING_DESCRIPTOR("915108"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_UPDATING_DETAILS("915130"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_UPDATING_ORDER_ID("915107"), TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_VOICE_AUTHORIZATIONS("91545"), TRANSACTION_PURCHASE_ORDER_NUMBER_IS_INVALID("91548"), TRANSACTION_PURCHASE_ORDER_NUMBER_IS_TOO_LONG("91537"), TRANSACTION_REFUND_AMOUNT_IS_TOO_LARGE("91521"), TRANSACTION_SERVICE_FEE_AMOUNT_CANNOT_BE_NEGATIVE("91554"), TRANSACTION_SERVICE_FEE_AMOUNT_FORMAT_IS_INVALID("91555"), TRANSACTION_SERVICE_FEE_AMOUNT_IS_TOO_LARGE("91556"), TRANSACTION_SERVICE_FEE_AMOUNT_NOT_ALLOWED_ON_MASTER_MERCHANT_ACCOUNT("91557"), TRANSACTION_SERVICE_FEE_IS_NOT_ALLOWED_ON_CREDITS("91552"), TRANSACTION_SERVICE_FEE_NOT_ACCEPTED_FOR_PAYPAL("91578"), TRANSACTION_SETTLEMENT_AMOUNT_IS_LESS_THAN_SERVICE_FEE_AMOUNT("91551"), TRANSACTION_SETTLEMENT_AMOUNT_IS_TOO_LARGE("91522"), TRANSACTION_SHIPPING_ADDRESS_DOESNT_MATCH_CUSTOMER("91581"), TRANSACTION_SUB_MERCHANT_ACCOUNT_REQUIRES_SERVICE_FEE_AMOUNT("91553"), TRANSACTION_SUBSCRIPTION_DOES_NOT_BELONG_TO_CUSTOMER("91529"), TRANSACTION_SUBSCRIPTION_ID_IS_INVALID("91528"), TRANSACTION_SUBSCRIPTION_STATUS_MUST_BE_PAST_DUE("91531"), TRANSACTION_TAX_AMOUNT_CANNOT_BE_NEGATIVE("81534"), TRANSACTION_TAX_AMOUNT_FORMAT_IS_INVALID("81535"), TRANSACTION_TAX_AMOUNT_IS_TOO_LARGE("81536"), TRANSACTION_US_BANK_ACCOUNT_NONCE_MUST_BE_PLAID_VERIFIED("915171"), TRANSACTION_US_BANK_ACCOUNT_NOT_VERIFIED("915172"), TRANSACTION_THREE_D_SECURE_AUTHENTICATION_FAILED("81571"), TRANSACTION_THREE_D_SECURE_TOKEN_IS_INVALID("91568"), 
TRANSACTION_THREE_D_SECURE_TRANSACTION_DATA_DOESNT_MATCH_VERIFY("91570"), TRANSACTION_THREE_D_SECURE_AUTHENTICATION_ID_IS_INVALID("915196"), TRANSACTION_THREE_D_SECURE_TRANSACTION_PAYMENT_METHOD_DOESNT_MATCH_THREE_D_SECURE_AUTHENTICATION_PAYMENT_METHOD("915197"), TRANSACTION_THREE_D_SECURE_AUTHENTICATION_ID_DOESNT_MATCH_NONCE_THREE_D_SECURE_AUTHENTICATION("915198"), TRANSACTION_THREE_D_SECURE_AUTHENTICATION_ID_WITH_THREE_D_SECURE_PASSTHRU_IS_INVALID("915199"), TRANSACTION_THREE_D_SECURE_PASS_THRU_ECI_FLAG_IS_REQUIRED("915113"), TRANSACTION_THREE_D_SECURE_PASS_THRU_CAVV_IS_REQUIRED("915116"), TRANSACTION_THREE_D_SECURE_PASS_THRU_XID_IS_REQUIRED("915115"), TRANSACTION_THREE_D_SECURE_PASS_THRU_ECI_FLAG_IS_INVALID("915114"), TRANSACTION_THREE_D_SECURE_PASS_THRU_MERCHANT_ACCOUNT_DOES_NOT_SUPPORT_CARD_TYPE("915131"), TRANSACTION_THREE_D_SECURE_PASS_THRU_AUTHENTICATION_RESPONSE_IS_INVALID("915120"), TRANSACTION_THREE_D_SECURE_PASS_THRU_DIRECTORY_RESPONSE_IS_INVALID("915121"), TRANSACTION_THREE_D_SECURE_PASS_THRU_CAVV_ALGORITHM_IS_INVALID("915122"), TRANSACTION_TYPE_IS_INVALID("91523"), TRANSACTION_TYPE_IS_REQUIRED("91524"), TRANSACTION_UNSUPPORTED_VOICE_AUTHORIZATION("91539"), TRANSACTION_TRANSACTION_SOURCE_IS_INVALID("915133"), TRANSACTION_CANNOT_SIMULATE_SETTLEMENT("91575"), US_BANK_ACCOUNT_VERIFICATION_NOT_CONFIRMABLE("96101"), US_BANK_ACCOUNT_VERIFICATION_MUST_BE_MICRO_TRANSFERS_VERIFICATION("96102"), US_BANK_ACCOUNT_VERIFICATION_AMOUNTS_DO_NOT_MATCH("96103"), US_BANK_ACCOUNT_VERIFICATION_TOO_MANY_CONFIRMATION_ATTEMPTS("96104"), US_BANK_ACCOUNT_VERIFICATION_UNABLE_TO_CONFIRM_DEPOSIT_AMOUNTS("96105"), US_BANK_ACCOUNT_VERIFICATION_INVALID_DEPOSIT_AMOUNTS("96106"), VERIFICATION_OPTIONS_AMOUNT_CANNOT_BE_NEGATIVE("94201"), VERIFICATION_OPTIONS_AMOUNT_FORMAT_IS_INVALID("94202"), VERIFICATION_OPTIONS_AMOUNT_IS_TOO_LARGE("94207"), VERIFICATION_OPTIONS_AMOUNT_NOT_SUPPORTED_BY_PROCESSOR("94203"), VERIFICATION_OPTIONS_MERCHANT_ACCOUNT_ID_IS_INVALID("94204"), 
VERIFICATION_OPTIONS_MERCHANT_ACCOUNT_IS_SUSPENDED("94205"), VERIFICATION_OPTIONS_MERCHANT_ACCOUNT_IS_FORBIDDEN("94206"), VERIFICATION_OPTIONS_MERCHANT_ACCOUNT_CANNOT_BE_SUB_MERCHANT_ACCOUNT("94208"), VERIFICATION_OPTIONS_ACCOUNT_TYPE_IS_INVALID("942184"), VERIFICATION_OPTIONS_ACCOUNT_TYPE_NOT_SUPPORTED("942185"), MERCHANT_ACCOUNT_CANNOT_BE_UPDATED("82674"), MERCHANT_ACCOUNT_DECLINED("82626"), MERCHANT_ACCOUNT_DECLINED_FAILED_KYC("82623"), MERCHANT_ACCOUNT_DECLINED_MASTER_CARD_MATCH("82622"), MERCHANT_ACCOUNT_DECLINED_OFAC("82621"), MERCHANT_ACCOUNT_DECLINED_SSN_INVALID("82624"), MERCHANT_ACCOUNT_DECLINED_SSN_MATCHES_DECEASED("82625"), MERCHANT_ACCOUNT_ID_CANNOT_BE_UPDATED("82675"), MERCHANT_ACCOUNT_ID_FORMAT_IS_INVALID("82603"), MERCHANT_ACCOUNT_ID_IS_IN_USE("82604"), MERCHANT_ACCOUNT_ID_IS_NOT_ALLOWED("82605"), MERCHANT_ACCOUNT_ID_IS_TOO_LONG("82602"), MERCHANT_ACCOUNT_MASTER_MERCHANT_ACCOUNT_ID_CANNOT_BE_UPDATED("82676"), MERCHANT_ACCOUNT_MASTER_MERCHANT_ACCOUNT_ID_IS_INVALID("82607"), MERCHANT_ACCOUNT_MASTER_MERCHANT_ACCOUNT_ID_IS_REQUIRED("82606"), MERCHANT_ACCOUNT_MASTER_MERCHANT_ACCOUNT_MUST_BE_ACTIVE("82608"), MERCHANT_ACCOUNT_TOS_ACCEPTED_IS_REQUIRED("82610"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ACCOUNT_NUMBER_IS_REQUIRED("82614"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_COMPANY_NAME_IS_INVALID("82631"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_COMPANY_NAME_IS_REQUIRED_WITH_TAX_ID("82633"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_DATE_OF_BIRTH_IS_REQUIRED("82612"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_EMAIL_ADDRESS_IS_INVALID("82616"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_FIRST_NAME_IS_INVALID("82627"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_FIRST_NAME_IS_REQUIRED("82609"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_LAST_NAME_IS_INVALID("82628"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_LAST_NAME_IS_REQUIRED("82611"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_PHONE_IS_INVALID("82636"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ROUTING_NUMBER_IS_INVALID("82635"), 
MERCHANT_ACCOUNT_APPLICANT_DETAILS_ROUTING_NUMBER_IS_REQUIRED("82613"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_SSN_IS_INVALID("82615"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_TAX_ID_IS_INVALID("82632"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_TAX_ID_IS_REQUIRED_WITH_COMPANY_NAME("82634"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_DATE_OF_BIRTH_IS_INVALID("82663"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ACCOUNT_NUMBER_IS_INVALID("82670"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_EMAIL_ADDRESS_IS_REQUIRED("82665"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_TAX_ID_MUST_BE_BLANK("82673"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_REGION_IS_INVALID("82664"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_LOCALITY_IS_REQUIRED("82618"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_POSTAL_CODE_IS_INVALID("82630"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_POSTAL_CODE_IS_REQUIRED("82619"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_REGION_IS_REQUIRED("82620"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_STREET_ADDRESS_IS_INVALID("82629"), MERCHANT_ACCOUNT_APPLICANT_DETAILS_ADDRESS_STREET_ADDRESS_IS_REQUIRED("82617"), MERCHANT_ACCOUNT_BUSINESS_LEGAL_NAME_IS_INVALID("82677"), MERCHANT_ACCOUNT_BUSINESS_DBA_NAME_IS_INVALID("82646"), MERCHANT_ACCOUNT_BUSINESS_TAX_ID_IS_INVALID("82647"), MERCHANT_ACCOUNT_BUSINESS_LEGAL_NAME_IS_REQUIRED_WITH_TAX_ID("82669"), MERCHANT_ACCOUNT_BUSINESS_TAX_ID_IS_REQUIRED_WITH_LEGAL_NAME("82648"), MERCHANT_ACCOUNT_BUSINESS_TAX_ID_MUST_BE_BLANK("82672"), MERCHANT_ACCOUNT_BUSINESS_ADDRESS_REGION_IS_INVALID("82684"), MERCHANT_ACCOUNT_BUSINESS_ADDRESS_STREET_ADDRESS_IS_INVALID("82685"), MERCHANT_ACCOUNT_BUSINESS_ADDRESS_POSTAL_CODE_IS_INVALID("82686"), MERCHANT_ACCOUNT_FUNDING_ROUTING_NUMBER_IS_REQUIRED("82640"), MERCHANT_ACCOUNT_FUNDING_ACCOUNT_NUMBER_IS_REQUIRED("82641"), MERCHANT_ACCOUNT_FUNDING_ACCOUNT_NUMBER_IS_INVALID("82671"), MERCHANT_ACCOUNT_FUNDING_ROUTING_NUMBER_IS_INVALID("82649"), MERCHANT_ACCOUNT_FUNDING_DESTINATION_IS_REQUIRED("82678"), 
MERCHANT_ACCOUNT_FUNDING_DESTINATION_IS_INVALID("82679"), MERCHANT_ACCOUNT_FUNDING_EMAIL_IS_REQUIRED("82680"), MERCHANT_ACCOUNT_FUNDING_EMAIL_IS_INVALID("82681"), MERCHANT_ACCOUNT_FUNDING_MOBILE_PHONE_IS_REQUIRED("82682"), MERCHANT_ACCOUNT_FUNDING_MOBILE_PHONE_IS_INVALID("82683"), MERCHANT_ACCOUNT_INDIVIDUAL_FIRST_NAME_IS_REQUIRED("82637"), MERCHANT_ACCOUNT_INDIVIDUAL_LAST_NAME_IS_REQUIRED("82638"), MERCHANT_ACCOUNT_INDIVIDUAL_DATE_OF_BIRTH_IS_REQUIRED("82639"), MERCHANT_ACCOUNT_INDIVIDUAL_DATE_OF_BIRTH_IS_INVALID("82666"), MERCHANT_ACCOUNT_INDIVIDUAL_SSN_IS_INVALID("82642"), MERCHANT_ACCOUNT_INDIVIDUAL_EMAIL_IS_INVALID("82643"), MERCHANT_ACCOUNT_INDIVIDUAL_EMAIL_IS_REQUIRED("82667"), MERCHANT_ACCOUNT_INDIVIDUAL_FIRST_NAME_IS_INVALID("82644"), MERCHANT_ACCOUNT_INDIVIDUAL_LAST_NAME_IS_INVALID("82645"), MERCHANT_ACCOUNT_INDIVIDUAL_PHONE_IS_INVALID("82656"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_STREET_ADDRESS_IS_REQUIRED("82657"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_LOCALITY_IS_REQUIRED("82658"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_POSTAL_CODE_IS_REQUIRED("82659"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_REGION_IS_REQUIRED("82660"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_REGION_IS_INVALID("82668"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_STREET_ADDRESS_IS_INVALID("82661"), MERCHANT_ACCOUNT_INDIVIDUAL_ADDRESS_POSTAL_CODE_IS_INVALID("82662"), MERCHANT_EMAIL_IS_REQUIRED("83601"), MERCHANT_EMAIL_FORMAT_IS_INVALID("93602"), MERCHANT_COUNTRY_CANNOT_BE_BLANK("83603"), MERCHANT_COUNTRY_CODE_ALPHA3_IS_NOT_ACCEPTED("93604"), MERCHANT_COUNTRY_CODE_ALPHA3_IS_INVALID("93605"), MERCHANT_COUNTRY_CODE_ALPHA2_IS_NOT_ACCEPTED("93606"), MERCHANT_COUNTRY_CODE_ALPHA2_IS_INVALID("93607"), MERCHANT_COUNTRY_CODE_NUMERIC_IS_NOT_ACCEPTED("93608"), MERCHANT_COUNTRY_CODE_NUMERIC_IS_INVALID("93609"), MERCHANT_COUNTRY_NAME_IS_NOT_ACCEPTED("93610"), MERCHANT_COUNTRY_NAME_IS_INVALID("93611"), MERCHANT_CURRENCIES_ARE_INVALID("93614"), MERCHANT_INCONSISTENT_COUNTRY("93612"), 
MERCHANT_PAYMENT_METHODS_ARE_INVALID("93613"), MERCHANT_PAYMENT_METHODS_ARE_NOT_ALLOWED("93615"), MERCHANT_MERCHANT_ACCOUNT_EXISTS_FOR_CURRENCY("93616"), MERCHANT_CURRENCY_IS_REQUIRED("93617"), MERCHANT_CURRENCY_IS_INVALID("93618"), MERCHANT_NO_MERCHANT_ACCOUNTS("93619"), MERCHANT_MERCHANT_ACCOUNT_EXISTS_FOR_ID("93620"), MERCHANT_MERCHANT_ACCOUNT_NOT_AUTH_ONBOARDED("93621"), REPORT_TRANSACTION_LEVEL_FEES_UNKNOWN_MERCHANT_ACCOUNT("86201"), // Stanza should match https://developers.braintreepayments.com/reference/general/validation-errors/all/java#venmo VENMO_COMMON_ID_REQUIRED("84101"), VENMO_USERNAME_REQUIRED("84102"), VENMO_USER_ID_REQUIRED("84103"), VENMO_CUSTOMER_ID_REQUIRED("84104"), VENMO_ACCOUNTS_NOT_ACCEPTED_BY_MERCHANT_ACCOUNT("84105"), VENMO_INVALID_CUSTOMER_ID("84106"), REQUIRED_ATTRIBUTE_MISSING("941996"), ATTRIBUTE_FORMAT_INVALID("941997"), ATTRIBUTE_IS_UNEXPECTED("941998"), ATTRIBUTE_INCORRECT_TYPE("941999"), @Deprecated UNKOWN_VALIDATION_ERROR(""); // NEXT_MAJOR_VERSION this should be `final` to prevent end users from modifying it public String code; private ValidationErrorCode(String code) { this.code = code; } public static ValidationErrorCode findByCode(String code) { for (ValidationErrorCode validationErrorCode : values()) { if (validationErrorCode.code.equals(code)) { return validationErrorCode; } } return UNKOWN_VALIDATION_ERROR; } }
package org.biojava.bio.structure.align.client;

import org.biojava.bio.structure.align.util.ResourceManager;

/**
 * Parameter container (JavaBean) for farm alignment jobs: how many alignments
 * to compute, for how long, on how many threads, and which server / local PDB
 * installation to use. Defaults are read once from the "jfatcat" resource
 * bundle where available.
 */
public class FarmJobParameters {

    /** -1 means: no time limit for the job. */
    public static final int DEFAULT_JOB_TIME = -1;

    /** -1 means: no upper limit on the number of alignments. */
    public static final int DEFAULT_NR_ALIGNMENTS = -1;

    public static final int DEFAULT_NR_THREADS = 1;

    /** Server URL, read once from the "jfatcat" bundle (key "server.url"). */
    public static final String DEFAULT_SERVER_URL;

    // final: assigned exactly once in the static initializer below
    private static final ResourceManager resourceManager;

    static {
        resourceManager = ResourceManager.getResourceManager("jfatcat");
        String server = resourceManager.getString("server.url");
        DEFAULT_SERVER_URL = server;
    }

    public static final String DEFAULT_PDB_PATH = "/tmp/";
    public static final boolean DEFAULT_DIR_SPLIT = true;
    public static final int DEFAULT_BATCH_SIZE = 100;

    /** Resource-bundle key that may override {@link #DEFAULT_BATCH_SIZE}. */
    private static final String DEFAULT_BATCH_SIZE_PROP = "request.pair.size";

    int nrAlignments;
    int time;
    int threads;
    String server;
    String pdbFilePath;
    boolean pdbDirSplit;
    String username;
    boolean runBackground;
    int stepSize;

    /** Creates a parameter set populated with the defaults above. */
    public FarmJobParameters() {
        nrAlignments = DEFAULT_NR_ALIGNMENTS;
        time = DEFAULT_JOB_TIME;
        threads = DEFAULT_NR_THREADS;
        server = DEFAULT_SERVER_URL;
        pdbFilePath = DEFAULT_PDB_PATH;
        pdbDirSplit = DEFAULT_DIR_SPLIT;
        runBackground = false;

        // The batch size may be overridden via the resource bundle; fall back
        // to DEFAULT_BATCH_SIZE when the property is missing or malformed.
        String nrPairsProp = resourceManager.getString(DEFAULT_BATCH_SIZE_PROP);
        stepSize = DEFAULT_BATCH_SIZE;
        if (nrPairsProp != null) {
            try {
                stepSize = Integer.parseInt(nrPairsProp);
            } catch (NumberFormatException ex) {
                // deliberately best-effort: report the bad value, keep the default
                ex.printStackTrace();
            }
        }
    }

    public String getPdbFilePath() {
        return pdbFilePath;
    }

    public void setPdbFilePath(String pdbFilePath) {
        this.pdbFilePath = pdbFilePath;
    }

    @Override
    public String toString() {
        return "FarmJobParameters [nrAlignments=" + nrAlignments + ", server="
                + server + ", threads=" + threads + ", time=" + time
                + ", username=" + username + "]";
    }

    public int getNrAlignments() {
        return nrAlignments;
    }

    public void setNrAlignments(int nrAlignments) {
        this.nrAlignments = nrAlignments;
    }

    public int getTime() {
        return time;
    }

    public void setTime(int time) {
        this.time = time;
    }

    public int getThreads() {
        return threads;
    }

    public void setThreads(int threads) {
        this.threads = threads;
    }

    public String getServer() {
        return server;
    }

    public void setServer(String server) {
        this.server = server;
    }

    public boolean isPdbDirSplit() {
        return pdbDirSplit;
    }

    public void setPdbDirSplit(boolean pdbDirSplit) {
        this.pdbDirSplit = pdbDirSplit;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    /** Flag if a job that only runs one parallel job should be run in its own thread or in the main thread.
     * For User interface related apps should be set to true. Default: false;
     * @return flag
     */
    public boolean isRunBackground() {
        return runBackground;
    }

    public void setRunBackground(boolean runBackground) {
        this.runBackground = runBackground;
    }

    /** how many pairs should be requested for alignment from server?
     *
     * @return stepsize
     */
    public int getStepSize() {
        return stepSize;
    }

    public void setStepSize(int stepSize) {
        this.stepSize = stepSize;
    }
}
package org.biojava.bio.structure.align.fatcat;

import junit.framework.TestCase;

/**
 * Regression tests that compare FATCAT alignment output strings against
 * expected reference output. The helpers below report (to stderr) where two
 * multi-line strings first diverge, to make failing comparisons debuggable.
 */
public class TestOutputStrings extends TestCase {

    // Platform line separator; the expected-output strings in the tests are
    // assembled with it so they match output produced on the same platform.
    static final String newline = System.getProperty("line.separator");

    /**
     * Prints to stderr the first line, and within that line the first
     * character position, at which {@code s1} and {@code s2} differ.
     * Returns after reporting the first mismatch; prints nothing when the
     * compared portion is identical. Diagnostic helper only — no return value.
     */
    public void printFirstMismatch(String s1, String s2){
        String[] spl1 = s1.split(newline);
        String[] spl2 = s2.split(newline);

        for (int i = 0 ; i < spl1.length ; i++){
            String line1 = spl1[i];

            // s2 ran out of lines before s1 did
            if ( i >= spl2.length){
                System.err.println("s2 does not contain line " + (i+1));
                return;
            }

            String line2 = spl2[i];

            if ( line1.equals(line2)){
                continue;
            }

            System.err.println("mismatch in line: " + (i+1));

            // scan the mismatching line character by character
            for ( int j = 0 ; j < line1.length();j++){
                char c1 = line1.charAt(j);

                // line2 is a strict prefix of line1
                if ( j >= line2.length()){
                    System.err.println("s2 is shorter than s1. length s1:" + line1.length() + " length2:" + line2.length() );
                    return;
                }

                char c2 = line2.charAt(j);

                if ( c1 != c2){
                    // show both lines up to and including the first differing char
                    System.err.println("line1: " + line1.substring(0,j+1));
                    System.err.println("line2: " + line2.substring(0,j+1));
                    System.err.println("mismatch at position " + (j+1) + " c1: "+ c1 + " " + c2);
                    return;
                }
            }
        }
    }

    /**
     * Dumps both complete strings to stderr for manual inspection when a
     * comparison fails.
     */
    protected void printMismatch(String orig, String mine){
        System.err.println("The two provided strings are not identical.");
        System.err.println("Original version");
        System.err.println(orig);
        System.err.println("My version");
        System.err.println(mine);
    }

    // a bad mismatch!
    // looks like a bug in the optimizer still...
    // spent already too much time with figuring this out. perhaps there is no bug
    // and the diff is caused by this tricky alignment and some Java/C differences...
// public void test1a641hng(){ // String pdb1 = "1a64"; // String chain1 = "A"; // String pdb2 = "1hng"; // String chain2 ="B"; // String originalOutput="Align 1a64A.pdb 94 with 1hngB.pdb 175" +newline + // "Twists 0 ini-len 72 ini-rmsd 20.93 opt-equ 55 opt-rmsd 7.78 chain-rmsd 20.93 Score 189.29 align-len 73 gaps 18 (24.66%)" +newline + // "P-value 9.15e-03 Afp-num 6497 Identity 4.11% Similarity 13.70%" +newline + // "Block 0 afp 9 score 189.29 rmsd 20.93 gap 17 (0.19%)" +newline + // "" +newline + // " . : . : . : . : . : . : . :" +newline + // " 111111111111111111 1111111111111111111 111111111111111" +newline + // "" +newline + // "" +newline + // "Chain 1: 88 ILD" +newline + // " 111" +newline + // "Chain 2: 65 IKN" +newline + // "" +newline + // "Note: positions are from PDB; the numbers between alignments are block index" +newline ; // String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,true); // if (! result.equals("")){ // String msg = "the created alignment images are not identical! "; // printMismatch(originalOutput,result); // printFirstMismatch(result, originalOutput); // fail(msg); // 100% identical public void test1jbe1ord(){ String pdb1 = "1jbe"; String chain1 = "A"; String pdb2 = "1ord"; String chain2 ="A"; String originalOutput="Align 1jbeA.pdb 126 with 1ordA.pdb 730" + newline + "Twists 0 ini-len 72 ini-rmsd 3.09 opt-equ 101 opt-rmsd 3.03 chain-rmsd 3.09 Score 123.13 align-len 127 gaps 26 (20.47%)" + newline + "P-value 3.45e-01 Afp-num 30029 Identity 11.02% Similarity 22.05%" + newline + "Block 0 afp 9 score 123.13 rmsd 3.09 gap 53 (0.42%)" + newline + "" + newline + " . : . : . : . : . : . : . :" + newline + "Chain 1: 3 DKELKFLVVDDFSTMRRIVRNLLKELGFNNVEEAEDGVDALNKLQAGGYGFVISDWNMPNMDGLELLKTI" + newline + " 11111111111 11111 111 1111111 111111111111111 11111111" + newline + "Chain 2: 1 SSSLKIASTQE "" + newline + " . : . : . : . : . : ." 
+ newline + "Chain 1: 73 RAAMSALPVLMVTAEAKKENIIAAAQAGASGYVVKPFT--AATLEEKLNKIFEKLGM" + newline + " 11111111111111111 11111111 11111111111 111111111111 1111" + newline + "Chain 2: 50 DATKFGIPVFAVTKDAQ-AISADELK-KIFHIIDLENKFDATVNAREIETAVNNYED" + newline + "" + newline + "Note: positions are from PDB; the numbers between alignments are block index" + newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,true); if (! result.equals("")){ String msg = "the created alignment images are not identical! "; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } // no point in testing flexible here, since it is identical... } //exact public void test1buz1ali(){ String pdb1 = "1buz"; String chain1 = "A"; String pdb2 = "1ali"; String chain2 ="A"; String originalOutput ="Align 1buzA.pdb 116 with 1aliA.pdb 446" + newline + "Twists 0 ini-len 64 ini-rmsd 5.32 opt-equ 80 opt-rmsd 3.50 chain-rmsd 5.32 Score 103.72 align-len 153 gaps 73 (47.71%)" +newline + "P-value 2.97e-01 Afp-num 15578 Identity 4.58% Similarity 15.03%" +newline + "Block 0 afp 8 score 103.72 rmsd 5.32 gap 55 (0.46%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 23 HHTAETLKQKVTQSLEKDDIRHIVLNLEDLSF " 11111111111111 111111111111 1111 1111111111111111111111111" +newline + "Chain 2: 297 PTLAQMTDKAIELL-SKNEKGFFLQVEGASIDKQDHAANPCGQIGETVDLDEAVQRALEFAKKEGNTLVI" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 80 CAISPAVKRLFDMSGL " 1111111 111 11" +newline + "Chain 2: 366 VTADHAHASQIVAPDTKAPGLTQALNTKDGAVMVMSYGNSEEDSQENTGSQLRIAAYGPHAANVVGLTDQ" +newline + "" +newline + " . 
:" +newline + "Chain 1: 104 SEQQALLTLGVAS" +newline + " 1111111111111" +newline + "Chain 2: 436 TDLFYTMKAALGL" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" + newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,true); if (! result.equals("")){ String msg = "the created alignment images are not identical! "; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } // exact public void test1buz1aliFlexible(){ String pdb1 = "1buz"; String chain1 = "A"; String pdb2 = "1ali"; String chain2 ="A"; String originalOutput ="Align 1buzA.pdb 116 with 1aliA.pdb 446" +newline + "Twists 1 ini-len 64 ini-rmsd 3.12 opt-equ 88 opt-rmsd 3.34 chain-rmsd 5.32 Score 103.72 align-len 199 gaps 111 (55.78%)" +newline + "P-value 3.26e-01 Afp-num 15578 Identity 3.52% Similarity 14.57%" +newline + "Block 0 afp 1 score 23.14 rmsd 0.76 gap 0 (0.00%)" +newline + "Block 1 afp 7 score 100.08 rmsd 3.32 gap 17 (0.23%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 5 DMNVKESVLCIRLTGELDH " 1 11111111111111 222222222222222222" +newline + "Chain 2: 246 VTEANQQKPLLGLFADGNMPVRWLGPKATYHGNIDKPAVTCTPNPQRNDSVPTLAQMTDKAIELLSKNEK" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 42 IRHIVLNLEDLS " 222222222222 222222222222222222 2222222222222 " +newline + "Chain 2: 316 GFFLQVEGASIDKQDHAANPCGQIGETVDLDEAVQRALEFAKKEGNTLVIVTADHAHASQIVAPDTKAPG" +newline + "" +newline + " . : . : . : . : . : ." +newline + "Chain 1: 99 I " 22 222222 2222" +newline + "Chain 2: 386 LTQALNTKDGAVMVMSYGNSEEDSQENTGSQLRIAAYGPHAANVVGLTDQTDLFYTMKA" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,false); if (! result.equals("")){ String msg = "the created alignment images are not identical! 
"; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } //exact public void test4hhbs(){ String pdb1= "4hhb"; String pdb2 = "4hhb"; String chain1 = "A"; String chain2 = "B"; String originalOutput="Align 4hhbA.pdb 141 with 4hhbB.pdb 146" +newline + "Twists 0 ini-len 128 ini-rmsd 1.36 opt-equ 139 opt-rmsd 1.49 chain-rmsd 1.36 Score 364.84 align-len 147 gaps 8 (5.44%)" +newline + "P-value 0.00e+00 Afp-num 13309 Identity 40.82% Similarity 57.82%" +newline + "Block 0 afp 16 score 364.84 rmsd 1.36 gap 17 (0.12%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 1 VLSPADKTNVKAAWGKVGAHAGEYGAEALERMFLSFPTTKTYFPHFDL " 11111111111111111 11111111111111111111111111111 1111111111111111" +newline + "Chain 2: 2 HLTPEEKSAVTALWGKV--NVDEVGGEALGRLLVVYPWTQRFFESFGDLSTPDAVMGNPKVKAHGKKVLG" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 65 ALTNAVAHVDDMPNALSALSDLHAHKLRVDPVNFKLLSHCLLVTLAAHLPAEFTPAVHASLDKFLASVST" +newline + " 1111111111111111111111111111111111111111111111111111111111111111111111" +newline + "Chain 2: 70 AFSDGLAHLDNLKGTFATLSELHCDKLHVDPENFRLLGNVLVCVLAHHFGKEFTPPVQAAYQKVVAGVAN" +newline + "" +newline + " ." +newline + "Chain 1: 135 VLTSKYR" +newline + " 1111111" +newline + "Chain 2: 140 ALAHKYH" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,true); if (! result.equals("")){ String msg = "the created alignment images are not identical! 
"; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } //exact public void test4hhbsFlexible(){ String pdb1= "4hhb"; String pdb2 = "4hhb"; String chain1 = "A"; String chain2 = "B"; String originalOutput="Align 4hhbA.pdb 141 with 4hhbB.pdb 146" +newline + "Twists 0 ini-len 128 ini-rmsd 1.36 opt-equ 139 opt-rmsd 1.49 chain-rmsd 1.36 Score 364.84 align-len 147 gaps 8 (5.44%)" +newline + "P-value 0.00e+00 Afp-num 13309 Identity 40.82% Similarity 57.82%" +newline + "Block 0 afp 16 score 364.84 rmsd 1.36 gap 17 (0.12%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 1 VLSPADKTNVKAAWGKVGAHAGEYGAEALERMFLSFPTTKTYFPHFDL " 11111111111111111 11111111111111111111111111111 1111111111111111" +newline + "Chain 2: 2 HLTPEEKSAVTALWGKV--NVDEVGGEALGRLLVVYPWTQRFFESFGDLSTPDAVMGNPKVKAHGKKVLG" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 65 ALTNAVAHVDDMPNALSALSDLHAHKLRVDPVNFKLLSHCLLVTLAAHLPAEFTPAVHASLDKFLASVST" +newline + " 1111111111111111111111111111111111111111111111111111111111111111111111" +newline + "Chain 2: 70 AFSDGLAHLDNLKGTFATLSELHCDKLHVDPENFRLLGNVLVCVLAHHFGKEFTPPVQAAYQKVVAGVAN" +newline + "" +newline + " ." +newline + "Chain 1: 135 VLTSKYR" +newline + " 1111111" +newline + "Chain 2: 140 ALAHKYH" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,false); if (! result.equals("")){ String msg = "the created alignment images are not identical! 
"; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } public void test1a641hngFlexible(){ String pdb1 = "1a64"; String chain1 = "A"; String pdb2 = "1hng"; String chain2 ="B"; String originalOutput="Align 1a64A.pdb 94 with 1hngB.pdb 175" +newline + "Twists 1 ini-len 88 ini-rmsd 1.84 opt-equ 94 opt-rmsd 0.64 chain-rmsd 20.77 Score 235.94 align-len 96 gaps 2 (2.08%)" +newline + "P-value 4.23e-13 Afp-num 6497 Identity 96.88% Similarity 97.92%" +newline + "Block 0 afp 5 score 118.80 rmsd 0.75 gap 0 (0.00%)" +newline + "Block 1 afp 6 score 143.14 rmsd 0.46 gap 0 (0.00%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 4 GTVWGALGHGINLNIPNFQMTDDIDEVRWERGSTLVAEFKR--KPFLKSGAFEILANGDLKIKNLTRDDS" +newline + " 11111111111111111111111111111111111111111 222222222222222222222222222" +newline + "Chain 2: 4 GTVWGALGHGINLNIPNFQMTDDIDEVRWERGSTLVAEFKRKMKPFLKSGAFEILANGDLKIKNLTRDDS" +newline + "" +newline + " . : . : ." +newline + "Chain 1: 74 GTYNVTVYSTNGTRILDKALDLRILE" +newline + " 22222222222222222222222222" +newline + "Chain 2: 74 GTYNVTVYSTNGTRILNKALDLRILE" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,false); if (! result.equals("")){ String msg = "the created alignment images are not identical! 
"; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } // 100% identical public void test1nbw1kidFlexible(){ String pdb1 = "1nbw"; String chain1 = "A"; String pdb2 = "1kid"; String chain2 ="A"; String originalOutput="Align 1nbwA.pdb 606 with 1kidA.pdb 193" +newline + "Twists 5 ini-len 120 ini-rmsd 5.60 opt-equ 155 opt-rmsd 3.58 chain-rmsd 21.86 Score 133.98 align-len 248 gaps 93 (37.50%)" +newline + "P-value 6.48e-01 Afp-num 37019 Identity 5.65% Similarity 18.55%" +newline + "Block 0 afp 6 score 68.26 rmsd 4.20 gap 13 (0.21%)" +newline + "Block 1 afp 2 score 42.05 rmsd 1.96 gap 1 (0.06%)" +newline + "Block 2 afp 1 score 22.05 rmsd 1.14 gap 0 (0.00%)" +newline + "Block 3 afp 2 score 40.82 rmsd 2.48 gap 0 (0.00%)" +newline + "Block 4 afp 2 score 40.27 rmsd 2.24 gap 0 (0.00%)" +newline + "Block 5 afp 2 score 47.60 rmsd 0.60 gap 0 (0.00%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 93 TESTMIGHNPQTPGGVG " 1 111111111111111 1111 111111111111111111111111" +newline + "Chain 2: 190 SEGMQFDRGYLSPYFINKPETGAVELES "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 156 RGINVVAAILKKDD--GVLVNNRLR---KTLPVVDEVTLLEQVPEGVMAAVEVAAPGQVVRILSNPYGIA" +newline + " 11111111111111 111111111 1111111 222" +newline + "Chain 2: 242 KAGKPLLIIAEDVEGEALATLVVNTMRGIVKVAAV "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 221 TFFGLSPEETQAIVPIARALIGNRSAVVLKTPQGDVQSRVIPA " 22222222222222222 33333333 444444444444444 55555" +newline + "Chain 2: 280 "" +newline + " . : . : . : ." +newline + "Chain 1: 286 IMQAMSACAPVRDIRGEPGTHAGGMLERVRKVMASLTG" +newline + " 555555555555 666666666666666666666" +newline + "Chain 2: 342 IQGRVAQIRQQIEE---ATSDYDREKLQERVAKLAGGV" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,false); if (! 
result.equals("")){ String msg = "the created alignment images are not identical! "; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } // 100% identical public void test1cdg8tim(){ String pdb1 = "1cdg"; String chain1 = "A"; String pdb2 = "8tim"; String chain2 ="A"; String originalOutput ="Align 1cdgA.pdb 686 with 8timA.pdb 247" +newline + "Twists 0 ini-len 128 ini-rmsd 8.15 opt-equ 159 opt-rmsd 4.72 chain-rmsd 8.15 Score 185.44 align-len 238 gaps 79 (33.19%)" +newline + "P-value 1.38e-01 Afp-num 50059 Identity 5.46% Similarity 13.87%" +newline + "Block 0 afp 16 score 185.44 rmsd 8.15 gap 121 (0.49%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 193 NLYDLADLNHNNSTVDVYLKDAIKMWLDLGIDGIRMDA---VKHMPFGWQKSFMAAVNNYKPVFTFGEWF" +newline + " 11111111 1 111111 111111111 111111111 111 111111" +newline + "Chain 2: 16 GDKKSLGELI "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 260 LGV " 111 11111111111 111111111 11 1111111111111111111111 11" +newline + "Chain 2: 65 NCYKVPKGAFTGEISPAMIKDIG "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 321 VTFIDNHDMERFHASNANRRKLEQALAFTLTS " 11111 111111111111111111111 111111 11111111 111111" +newline + "Chain 2: 124 IACIG "" +newline + " . : . : ." +newline + "Chain 1: 382 STSTTAYQVIQKLAPLRK---CNPAIAY" +newline + " 11 1111111111111 1111111" +newline + "Chain 2: 179 QQ---AQEVHEKLRGWLKTHVSDAVAQS" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,true); if (! result.equals("")){ String msg = "the created alignment images are not identical! 
"; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } // 100% identical public void test1cdg8timFlexible(){ String pdb1 = "1cdg"; String chain1 = "A"; String pdb2 = "8tim"; String chain2 ="A"; String originalOutput ="Align 1cdgA.pdb 686 with 8timA.pdb 247" +newline + "Twists 3 ini-len 112 ini-rmsd 3.95 opt-equ 149 opt-rmsd 3.85 chain-rmsd 8.15 Score 185.44 align-len 255 gaps 106 (41.57%)" +newline + "P-value 3.52e-01 Afp-num 50059 Identity 7.45% Similarity 18.82%" +newline + "Block 0 afp 4 score 72.41 rmsd 2.94 gap 28 (0.47%)" +newline + "Block 1 afp 2 score 26.63 rmsd 2.67 gap 1 (0.06%)" +newline + "Block 2 afp 5 score 86.03 rmsd 4.72 gap 24 (0.38%)" +newline + "Block 3 afp 3 score 49.73 rmsd 4.26 gap 28 (0.54%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 223 IDGIRMDAVKHMPFGWQKSFMAAVNNYKP " 11111111111111111111111111111 111111111 " +newline + "Chain 2: 5 KFFVGGNWKMNGDKKSLGELIHTLNGAKLSADTEVVCGAPSIYLDFARQKLDAKIGVAAQNCYKVPKGAF" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 266 -SPENHKFANESGMSLLDFRFAQKVRQVFRDNTDNMYGLKAMLEGSAADYAQVDDQVTFIDNHDMERFHA" +newline + " 2222222222222222222 333333333333333333333333333 333333 333" +newline + "Chain 2: 75 TGEISPAMIKDIGAAWVILG "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 335 SNANR--RKLEQALAFTLTSR " 33333 33333333333333 333333 444444444444" +newline + "Chain 2: 131 LDEREAGITEKVVFEQTKAIADNVKDWSKVVLAYEPVWAIGTGKTA "" +newline + " . : . : . : . : ." +newline + "Chain 1: 395 APLRKCNPAIAYGSTQERWINNDVLIYERKFGSNVAVVAVNRNLN" +newline + " 4444444444444 4 44444" +newline + "Chain 2: 189 RGWLKTHVSDAVAQSTRIIYGGSVTGGNCKELASQHDVDGFLVGG" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline ; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,false); if (! 
result.equals("")){ String msg = "the created alignment images are not identical! "; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } //exact public void test1a211hwgFlexible(){ String pdb1 = "1a21"; String chain1 = "A"; String pdb2 = "1hwg"; String chain2 ="C"; String originalOutput="Align 1a21A.pdb 194 with 1hwgC.pdb 191" +newline + "Twists 1 ini-len 120 ini-rmsd 3.04 opt-equ 150 opt-rmsd 2.96 chain-rmsd 4.21 Score 233.34 align-len 210 gaps 60 (28.57%)" +newline + "P-value 1.15e-05 Afp-num 12696 Identity 9.52% Similarity 19.05%" +newline + "Block 0 afp 4 score 66.42 rmsd 2.03 gap 6 (0.16%)" +newline + "Block 1 afp 11 score 184.29 rmsd 3.24 gap 69 (0.44%)" +newline + "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 6 RAYNLTWKSTN-FKTILEWEPKSIDHVYTVQISTRLENWKSKCFLTAE---TECDLTDEVVKDVGQTYMA" +newline + " 11111111111 111111111 111111111111111111 222222222 222222 " +newline + "Chain 2: 32 EPKFTKCRSPERETFSCHWTD "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 72 RVLSYPARNTTGFPEEPPFRNSPEFTPYLDTNLGQPTIQSFEQVG " 22222222222222222222222 222222222222 222" +newline + "Chain 2: 109 IKLTSNGGTVDE "" +newline + " . : . : . : . : . : . : . :" +newline + "Chain 1: 141 LRAVFGKDLNYTLYYWR " 222222 222222222 22222 22222222222222 2222222222222 222222222222" +newline + "Chain 2: 166 QKGWMV--LEYELQYKEVNETKWKMMDPILTTSVPVYSLKVDKEYEVRVRSKQRNS--GNYGEFSEVLYV" +newline + "" +newline + "Note: positions are from PDB; the numbers between alignments are block index" +newline; String result = MyTestHelper.compareAlignment(pdb1, chain1, pdb2, chain2, originalOutput,false); if (! result.equals("")){ String msg = "the created alignment images are not identical! "; printMismatch(originalOutput,result); printFirstMismatch(result, originalOutput); fail(msg); } } }
package com.danubetech.libsovrin;

/**
 * Exception type for errors raised by the Sovrin wrapper.
 *
 * <p>Instances are either constructed directly with a message or derived from
 * an {@link ErrorCode} via {@link #fromErrorCode(ErrorCode)}.</p>
 */
public class SovrinException extends Exception {

    private static final long serialVersionUID = 2650355290834266477L;

    /**
     * Creates an exception with the given detail message.
     *
     * @param message the detail message
     */
    public SovrinException(String message) {
        super(message);
    }

    /**
     * Creates an exception with the given detail message and cause.
     * Preserves the underlying cause for diagnostics instead of dropping it.
     *
     * @param message the detail message
     * @param cause the underlying cause
     */
    public SovrinException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Builds an exception whose message is {@code "<name>: <value>"} for the
     * given error code. A {@code null} error code yields the message
     * {@code "null: null"}, matching the historical behavior of this method.
     *
     * @param errorCode the error code to describe; may be {@code null}
     * @return a new {@code SovrinException} describing the error code
     */
    public static SovrinException fromErrorCode(ErrorCode errorCode) {
        // Single null check instead of repeating the ternary per field.
        if (errorCode == null) {
            return new SovrinException("null: null");
        }
        return new SovrinException(errorCode.name() + ": " + errorCode.value());
    }
}
package stsc.general.simulator.multistarter;

import stsc.general.simulator.multistarter.BadParameterException;
import stsc.general.simulator.multistarter.MpDouble;
import stsc.general.simulator.multistarter.ParameterList;

import junit.framework.TestCase;

/**
 * Tests for {@link ParameterList}: iteration via {@code increment()},
 * cloning semantics, and the total size of the parameter space.
 */
public class ParameterListTest extends TestCase {

	/**
	 * Builds a two-parameter list: "asd" over [0.0, 1.0) with step 0.1 and
	 * "vrt" over [0.5, 1.0) with step 0.1.
	 */
	private ParameterList<Double> getList() throws BadParameterException {
		final ParameterList<Double> parameters = new ParameterList<Double>();
		parameters.add(new MpDouble("asd", 0.0, 1.0, 0.1));
		parameters.add(new MpDouble("vrt", 0.5, 1.0, 0.1));
		return parameters;
	}

	public void testParameterList() throws BadParameterException {
		final ParameterList<Double> original = getList();
		// Advance the list five times; per toString(), the counter reaches 5
		// while a clone taken afterwards restarts at 0.
		for (int step = 0; step < 5; ++step) {
			original.increment();
		}
		final ParameterList<Double> copy = original.clone();
		assertEquals("5: [asd:0.0 from (0.1|0.0:1.0), vrt:0.5 from (0.1|0.5:1.0)]", original.toString());
		assertEquals("0: [asd:0.0 from (0.1|0.0:1.0), vrt:0.5 from (0.1|0.5:1.0)]", copy.toString());
	}

	public void testParameterListSize() throws BadParameterException {
		// 10 values for "asd" times 5 values for "vrt" = 50 combinations.
		assertEquals(50L, getList().size());
	}
}
package com.eharrison.canary.r2w;

import static com.eharrison.canary.r2w.RegionUtil.*;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;

import net.canarymod.Canary;
import net.canarymod.api.world.DimensionType;
import net.canarymod.api.world.World;
import net.canarymod.api.world.WorldManager;
import net.canarymod.api.world.blocks.Block;
import net.canarymod.api.world.blocks.BlockType;
import net.minecraft.world.level.chunk.DataLayer;
import net.visualillusionsent.utils.TaskManager;

import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.Logger;

import com.eharrison.canary.r2w.io.RegionFile;
import com.mojang.nbt.CompoundTag;
import com.mojang.nbt.ListTag;
import com.mojang.nbt.NbtIo;

// Example commands exercising this class:
// /rtw restore default normal -10 67 25 -12 67 27
// /rtw restore default normal -10 50 -10 10 80 10
// /rtw restore default normal -8 54 9 -13 51 14
/**
 * Manages world "templates": snapshots of a world's Anvil region files kept
 * under a templates directory. Supports creating/removing a template,
 * restoring a block volume of a live world from its template, and updating
 * the template from the live world. Long-running operations are submitted
 * asynchronously via {@link TaskManager#submitTask} and report completion
 * through the returned {@link Future}.
 */
public class TemplateManager {

	private final Logger log;
	private final WorldManager worldManager;
	// Root directory holding live world data (default "worlds").
	private final File worldsDir;
	// Root directory holding template snapshots (default "templates").
	private final File templatesDir;

	/** Default wiring: plugin logger, Canary's world manager, conventional dirs. */
	public TemplateManager() {
		this(ReturnPlugin.LOG, Canary.getServer().getWorldManager(), new File("worlds"), new File(
				"templates"));
	}

	/**
	 * Full constructor, primarily for tests.
	 *
	 * @param log logger for progress/diagnostics
	 * @param worldManager source of live worlds
	 * @param worldsDir root of live world directories
	 * @param templatesDir root of template directories
	 */
	public TemplateManager(final Logger log, final WorldManager worldManager, final File worldsDir,
			final File templatesDir) {
		this.log = log;
		this.worldManager = worldManager;
		this.worldsDir = worldsDir;
		this.templatesDir = templatesDir;
	}

	/**
	 * Asynchronously snapshots the region files of the named world/dimension
	 * into the templates directory. If the world is currently loaded it is
	 * unloaded first (players are warned via a broadcast) and reloaded after
	 * the copy.
	 *
	 * @param name world name
	 * @param type dimension of the world
	 * @return future resolving to {@code true} on success
	 */
	public Future<Boolean> createTemplate(final String name, final DimensionType type) {
		return TaskManager.submitTask(new Callable<Boolean>() {
			@Override
			public Boolean call() {
				boolean success = true;
				// Unload the world
				boolean loaded = false;
				if (worldManager.worldIsLoaded(name, type)) {
					log.info("Unloading world");
					loaded = true;
					worldManager.getWorld(name, type, false).broadcastMessage(
							"Creating a world template, You have to GET OUT!");
					// TODO: Add pause?
					worldManager.unloadWorld(name, type, true);
				}
				// Create the template directory
				final File templateDir = getTemplateDir(name, type);
				if (!templateDir.exists()) {
					success = templateDir.mkdirs();
				}
				log.info("Prepared template world directory");
				if (success) {
					// Copy the world region data into the template
					log.info("Copying region files");
					try {
						FileUtils.copyDirectory(new File(worldsDir, name + "/" + name + "_" + type.getName()
								+ "/region"), new File(templateDir, "region"));
					} catch (final IOException e) {
						// NOTE(review): a copy failure is only logged here; success stays
						// true, so the returned Future still reports success — confirm intended.
						log.error("Error copying region files", e);
					}
				}
				// Load the world if it was loaded to begin with
				if (loaded) {
					log.info("Loading world");
					worldManager.loadWorld(name, type);
				}
				log.info("Created template " + name + "_" + type.getName() + ": " + success);
				return success;
			}
		});
	}

	/**
	 * Asynchronously deletes the template directory for the named
	 * world/dimension, if it exists and is writable.
	 *
	 * @param name world name
	 * @param type dimension of the world
	 * @return future resolving to {@code true} on success (also {@code true}
	 *         when there was nothing to delete)
	 */
	public Future<Boolean> removeTemplate(final String name, final DimensionType type) {
		return TaskManager.submitTask(new Callable<Boolean>() {
			@Override
			public Boolean call() throws IOException {
				boolean success = true;
				final File templateDir = getTemplateDir(name, type);
				if (templateDir.exists() && templateDir.canWrite()) {
					success = FileUtils.deleteQuietly(templateDir);
				}
				log.info("Removed template " + name + "_" + type.getName() + ": " + success);
				return success;
			}
		});
	}

	/**
	 * Asynchronously writes the live world's blocks inside the given volume
	 * back into the template's region files (template := world).
	 * Coordinates may be given in any order; y is clamped to [0, 255].
	 *
	 * @return future resolving to {@code true} if the world could be loaded
	 */
	public Future<Boolean> update(final String name, final DimensionType type, final int x1,
			final int y1, final int z1, final int x2, final int y2, final int z2) {
		return TaskManager.submitTask(new Callable<Boolean>() {
			@Override
			public Boolean call() throws Exception {
				boolean success = false;
				// Load the target world
				final World world = worldManager.getWorld(name, type, true);
				if (world != null) {
					success = true;
					// Determine the block bounds
					final int xMin = Math.min(x1, x2);
					final int xMax = Math.max(x1, x2);
					final int yMin = Math.max(Math.min(y1, y2), 0);
					final int yMax = Math.min(Math.max(y1, y2), 255);
					final int zMin = Math.min(z1, z2);
					final int zMax = Math.max(z1, z2);
					// Determine the included regions
					final int regionXMin = RegionUtil.getRegionForBlockCoordinate(xMin);
					final int regionXMax = RegionUtil.getRegionForBlockCoordinate(xMax);
					final int regionZMin = RegionUtil.getRegionForBlockCoordinate(zMin);
					final int regionZMax = RegionUtil.getRegionForBlockCoordinate(zMax);
					// For each region
					for (int regionX = regionXMin; regionX <= regionXMax; regionX++) {
						for (int regionZ = regionZMin; regionZ <= regionZMax; regionZ++) {
							// Load the appropriate region file
							final RegionFile region = loadRegionFile(name, type, regionX, regionZ);
							if (region != null) {
								log.info("Processing region: " + regionX + ":" + regionZ);
								updateRegion(world, region, regionX, regionZ, xMin, yMin, zMin, xMax, yMax, zMax);
								// Close the region File
								region.close();
							}
						}
					}
				}
				return success;
			}
		});
	}

	/**
	 * Asynchronously restores the template's blocks inside the given volume
	 * into the live world (world := template). Coordinates may be given in
	 * any order; y is clamped to [0, 255].
	 *
	 * @return future resolving to {@code true} if the world could be loaded
	 */
	public Future<Boolean> restore(final String name, final DimensionType type, final int x1,
			final int y1, final int z1, final int x2, final int y2, final int z2) {
		return TaskManager.submitTask(new Callable<Boolean>() {
			@Override
			public Boolean call() throws IOException {
				boolean success = false;
				// Load the target world
				final World world = worldManager.getWorld(name, type, true);
				if (world != null) {
					success = true;
					// Determine the block bounds
					final int xMin = Math.min(x1, x2);
					final int xMax = Math.max(x1, x2);
					final int yMin = Math.max(Math.min(y1, y2), 0);
					final int yMax = Math.min(Math.max(y1, y2), 255);
					final int zMin = Math.min(z1, z2);
					final int zMax = Math.max(z1, z2);
					// Determine the included regions
					final int regionXMin = RegionUtil.getRegionForBlockCoordinate(xMin);
					final int regionXMax = RegionUtil.getRegionForBlockCoordinate(xMax);
					final int regionZMin = RegionUtil.getRegionForBlockCoordinate(zMin);
					final int regionZMax = RegionUtil.getRegionForBlockCoordinate(zMax);
					// For each region
					for (int regionX = regionXMin; regionX <= regionXMax; regionX++) {
						for (int regionZ = regionZMin; regionZ <= regionZMax; regionZ++) {
							// Load the appropriate region file
							final RegionFile region = loadRegionFile(name, type, regionX, regionZ);
							if (region != null) {
								log.info("Processing region: " + regionX + ":" + regionZ);
								restoreRegion(world, region, regionX, regionZ, xMin, yMin, zMin, xMax, yMax, zMax);
								// Close the region File
								region.close();
							}
						}
					}
				}
				return success;
			}
		});
	}

	/**
	 * Reads a single block's type straight from the template's region files
	 * (synchronous; does not touch the live world).
	 *
	 * @param name world name
	 * @param type dimension of the world
	 * @param x absolute block x
	 * @param y absolute block y
	 * @param z absolute block z
	 * @return the stored block type/data for that position
	 * @throws IOException on region/NBT read failure
	 */
	public BlockType getTemplateBlock(final String name, final DimensionType type, final int x,
			final int y, final int z) throws IOException {
		// Determine the absolute chunk of the block
		final int chunkX = getChunkForBlockCoordinate(x);
		final int chunkZ = getChunkForBlockCoordinate(z);
		// System.out.println("Chunk: " + chunkX + ":" + chunkZ);
		// Determine the region of the chunk
		final int regionX = getRegionForChunkCoordinate(chunkX);
		final int regionZ = getRegionForChunkCoordinate(chunkZ);
		// System.out.println("Region: " + regionX + ":" + regionZ);
		// Load the appropriate region file
		// NOTE(review): loadRegionFile may return null when the file is absent,
		// which would NPE below — confirm callers guarantee the region exists.
		final RegionFile region = loadRegionFile(name, type, regionX, regionZ);
		// System.out.println(region.printOffsets());
		// Determine the relative chunk in the region
		final int relChunkX = getRegionRelativeChunkCoordinate(chunkX);
		final int relChunkZ = getRegionRelativeChunkCoordinate(chunkZ);
		// System.out.println("Region Chunk: " + relChunkX + ":" + relChunkZ);
		// Load the chunk
		// NOTE(review): dis is not closed if NbtIo.read throws — consider try-with-resources.
		final DataInputStream dis = region.getChunkDataInputStream(relChunkX, relChunkZ);
		final CompoundTag level = NbtIo.read(dis);
		dis.close();
		final CompoundTag chunkData = level.getCompound("Level");
		// Get the height map
		// final int[] heightMap = chunkData.getIntArray("HeightMap");
		// System.out.println(printHeightMap(heightMap, 16));
		// Determine the section of the chunk
		final int sectionId = getSectionForBlockCoordinate(y);
		// System.out.println("Section: " + sectionId);
		// Find the section
		final ListTag<?> sections = chunkData.getList("Sections");
		final CompoundTag section = (CompoundTag) sections.get(sectionId);
		// System.out.println("Section: " + sectionId);
		// System.out.println(printSection(section));
		// Determine the relative block location
		final int relX = getChunkRelativeBlockCoordinate(x);
		final int relY = getSectionRelativeBlockCoordinate(y);
		final int relZ = getChunkRelativeBlockCoordinate(z);
		// System.out.println("Section Block: " + relX + ":" + relY + ":" + relZ);
		// Get the block type and data from the section
		final byte[] blocks = section.getByteArray("Blocks");
		final DataLayer dataValues = new DataLayer(section.getByteArray("Data"), 4);
		// Index layout matches the section's YZX block ordering used throughout
		// this class: relY << 8 | relZ << 4 | relX.
		final int blockType = blocks[relY << 8 | relZ << 4 | relX];
		final int blockData = dataValues.get(relX, relY, relZ);
		// Close the region File
		region.close();
		return BlockType.fromIdAndData(blockType, blockData);
	}

	/**
	 * Restores every template chunk of one region that intersects the
	 * requested block volume into the live world.
	 */
	private void restoreRegion(final World world, final RegionFile region, final int regionX,
			final int regionZ, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax) throws IOException {
		if (world != null && region != null) {
			// Determine the included blocks
			final int regionBlockXMin = RegionUtil.getRegionBlockIntersection(regionX, xMin);
			final int regionBlockXMax = RegionUtil.getRegionBlockIntersection(regionX, xMax);
			final int regionBlockZMin = RegionUtil.getRegionBlockIntersection(regionZ, zMin);
			final int regionBlockZMax = RegionUtil.getRegionBlockIntersection(regionZ, zMax);
			// Determine the included chunks
			final int chunkXMin = RegionUtil.getChunkForBlockCoordinate(regionBlockXMin);
			final int chunkXMax = RegionUtil.getChunkForBlockCoordinate(regionBlockXMax);
			final int chunkZMin = RegionUtil.getChunkForBlockCoordinate(regionBlockZMin);
			final int chunkZMax = RegionUtil.getChunkForBlockCoordinate(regionBlockZMax);
			// For each chunk
			for (int chunkX = chunkXMin; chunkX <= chunkXMax; chunkX++) {
				for (int chunkZ = chunkZMin; chunkZ <= chunkZMax; chunkZ++) {
					// Get the relative location of the chunk
					final int relChunkX = RegionUtil.getRegionRelativeChunkCoordinate(chunkX);
					final int relChunkZ = RegionUtil.getRegionRelativeChunkCoordinate(chunkZ);
					if (region.hasChunk(relChunkX, relChunkZ)) {
						// Load the chunk
						// NOTE(review): dis is not closed if NbtIo.read throws — consider try-with-resources.
						final DataInputStream dis = region.getChunkDataInputStream(relChunkX, relChunkZ);
						final CompoundTag chunk = NbtIo.read(dis);
						dis.close();
						restoreChunk(world, chunk, chunkX, chunkZ, xMin, yMin, zMin, xMax, yMax, zMax);
					}
				}
			}
		}
	}

	/**
	 * Restores one chunk's sections into the live world. Sections within the
	 * y-range that do not exist in the template are filled with air.
	 */
	private void restoreChunk(final World world, final CompoundTag chunk, final int chunkX,
			final int chunkZ, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax) {
		if (chunk != null && !chunk.isEmpty()) {
			final CompoundTag level = chunk.getCompound("Level");
			log.debug("Processing chunk: " + chunkX + ":" + chunkZ);
			// Determine the included sections
			final int sectionMin = RegionUtil.getSectionForBlockCoordinate(yMin);
			final int sectionMax = RegionUtil.getSectionForBlockCoordinate(yMax);
			// For each section
			final ListTag<?> sections = level.getList("Sections");
			for (int sectionY = sectionMin; sectionY <= sectionMax; sectionY++) {
				if (sectionY < sections.size()) {
					// Restore the section
					final CompoundTag section = (CompoundTag) sections.get(sectionY);
					restoreSection(world, chunkX, chunkZ, section, xMin, yMin, zMin, xMax, yMax, zMax);
				} else {
					// Fill the section with air
					fillSection(world, chunkX, chunkZ, sectionY, xMin, yMin, zMin, xMax, yMax, zMax,
							BlockType.Air);
				}
			}
		}
	}

	/**
	 * Restores the blocks of one section that intersect the requested volume
	 * into the live world.
	 */
	private void restoreSection(final World world, final int chunkX, final int chunkZ,
			final CompoundTag section, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax) {
		if (section != null && !section.isEmpty()) {
			final int sectionY = section.getByte("Y");
			// log.info("Processing section: " + sectionY);
			// Determine the included blocks
			final int sectionBlockMin = RegionUtil.getSectionBlockIntersection(sectionY, yMin);
			final int sectionBlockMax = RegionUtil.getSectionBlockIntersection(sectionY, yMax);
			for (int blockY = sectionBlockMin; blockY <= sectionBlockMax; blockY++) {
				// Determine the included blocks
				final int blockXMin = RegionUtil.getChunkBlockIntersection(chunkX, xMin);
				final int blockXMax = RegionUtil.getChunkBlockIntersection(chunkX, xMax);
				final int blockZMin = RegionUtil.getChunkBlockIntersection(chunkZ, zMin);
				final int blockZMax = RegionUtil.getChunkBlockIntersection(chunkZ, zMax);
				// For each block
				for (int blockX = blockXMin; blockX <= blockXMax; blockX++) {
					for (int blockZ = blockZMin; blockZ <= blockZMax; blockZ++) {
						restoreBlock(world, section, blockX, blockY, blockZ);
					}
				}
			}
		}
	}

	/**
	 * Sets every block of one section that intersects the requested volume to
	 * the given block type in the live world (used for sections missing from
	 * the template).
	 */
	private void fillSection(final World world, final int chunkX, final int chunkZ,
			final int sectionY, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax, final BlockType blockType) {
		log.debug("Filling section: " + sectionY);
		// Determine the included blocks
		final int sectionBlockMin = RegionUtil.getSectionBlockIntersection(sectionY, yMin);
		final int sectionBlockMax = RegionUtil.getSectionBlockIntersection(sectionY, yMax);
		for (int blockY = sectionBlockMin; blockY <= sectionBlockMax; blockY++) {
			// Determine the included blocks
			final int blockXMin = RegionUtil.getChunkBlockIntersection(chunkX, xMin);
			final int blockXMax = RegionUtil.getChunkBlockIntersection(chunkX, xMax);
			final int blockZMin = RegionUtil.getChunkBlockIntersection(chunkZ, zMin);
			final int blockZMax = RegionUtil.getChunkBlockIntersection(chunkZ, zMax);
			// For each block
			for (int blockX = blockXMin; blockX <= blockXMax; blockX++) {
				for (int blockZ = blockZMin; blockZ <= blockZMax; blockZ++) {
					// Set the block in the target world
					log.debug("Setting block: " + blockX + ":" + blockY + ":" + blockZ + " to "
							+ blockType.getId() + ":" + blockType.getData());
					final Block block = world.getBlockAt(blockX, blockY, blockZ);
					block.setType(blockType);
					block.update();
				}
			}
		}
	}

	/**
	 * Copies a single block (type + data) from the template section into the
	 * live world at the given absolute coordinates.
	 */
	private void restoreBlock(final World world, final CompoundTag section, final int blockX,
			final int blockY, final int blockZ) {
		if (world != null && section != null && !section.isEmpty()) {
			// log.info("Processing block: " + blockX + ":" + blockY + ":" + blockZ);
			// Determine the relative block location
			final int relX = getChunkRelativeBlockCoordinate(blockX);
			final int relY = getSectionRelativeBlockCoordinate(blockY);
			final int relZ = getChunkRelativeBlockCoordinate(blockZ);
			// Get the block type and data from the section
			final byte[] blocks = section.getByteArray("Blocks");
			final DataLayer dataValues = new DataLayer(section.getByteArray("Data"), 4);
			final short type = blocks[relY << 8 | relZ << 4 | relX];
			final short data = (short) dataValues.get(relX, relY, relZ);
			log.debug("Setting block: " + blockX + ":" + blockY + ":" + blockZ + " to " + type + ":"
					+ data);
			// Set the block in the target world
			final Block block = world.getBlockAt(blockX, blockY, blockZ);
			block.setType(BlockType.fromIdAndData(type, data));
			block.update();
		}
	}

	/**
	 * Updates every template chunk of one region that intersects the requested
	 * block volume from the live world, writing the modified chunk NBT back
	 * into the region file.
	 */
	private void updateRegion(final World world, final RegionFile region, final int regionX,
			final int regionZ, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax) throws IOException {
		if (world != null && region != null) {
			// Determine the included blocks
			final int regionBlockXMin = RegionUtil.getRegionBlockIntersection(regionX, xMin);
			final int regionBlockXMax = RegionUtil.getRegionBlockIntersection(regionX, xMax);
			final int regionBlockZMin = RegionUtil.getRegionBlockIntersection(regionZ, zMin);
			final int regionBlockZMax = RegionUtil.getRegionBlockIntersection(regionZ, zMax);
			// Determine the included chunks
			final int chunkXMin = RegionUtil.getChunkForBlockCoordinate(regionBlockXMin);
			final int chunkXMax = RegionUtil.getChunkForBlockCoordinate(regionBlockXMax);
			final int chunkZMin = RegionUtil.getChunkForBlockCoordinate(regionBlockZMin);
			final int chunkZMax = RegionUtil.getChunkForBlockCoordinate(regionBlockZMax);
			// For each chunk
			for (int chunkX = chunkXMin; chunkX <= chunkXMax; chunkX++) {
				for (int chunkZ = chunkZMin; chunkZ <= chunkZMax; chunkZ++) {
					// Get the relative location of the chunk
					final int relChunkX = RegionUtil.getRegionRelativeChunkCoordinate(chunkX);
					final int relChunkZ = RegionUtil.getRegionRelativeChunkCoordinate(chunkZ);
					if (region.hasChunk(relChunkX, relChunkZ)) {
						// Load the chunk
						// NOTE(review): dis/dos are not closed if read/write throws —
						// consider try-with-resources.
						final DataInputStream dis = region.getChunkDataInputStream(relChunkX, relChunkZ);
						final CompoundTag chunk = NbtIo.read(dis);
						dis.close();
						// Update the chunk
						updateChunk(world, chunk, chunkX, chunkZ, xMin, yMin, zMin, xMax, yMax, zMax);
						// Write the chunk
						final DataOutputStream dos = region.getChunkDataOutputStream(relChunkX, relChunkZ);
						NbtIo.write(chunk, dos);
						dos.close();
					}
				}
			}
		}
	}

	/**
	 * Updates one chunk's sections from the live world. Sections within the
	 * y-range that do not yet exist in the chunk NBT are created and appended.
	 */
	private void updateChunk(final World world, final CompoundTag chunk, final int chunkX,
			final int chunkZ, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax) {
		if (chunk != null && !chunk.isEmpty()) {
			final CompoundTag level = chunk.getCompound("Level");
			log.debug("Processing chunk: " + chunkX + ":" + chunkZ);
			// Determine the included sections
			final int sectionMin = RegionUtil.getSectionForBlockCoordinate(yMin);
			final int sectionMax = RegionUtil.getSectionForBlockCoordinate(yMax);
			// For each section
			@SuppressWarnings("unchecked")
			final ListTag<CompoundTag> sections = (ListTag<CompoundTag>) level.getList("Sections");
			for (int sectionY = sectionMin; sectionY <= sectionMax; sectionY++) {
				if (sectionY < sections.size()) {
					// Restore the section
					final CompoundTag section = sections.get(sectionY);
					updateSection(world, chunkX, chunkZ, section, xMin, yMin, zMin, xMax, yMax, zMax);
				} else {
					// Create the new section, then update it
					// NOTE(review): a brand-new CompoundTag is empty, so updateSection's
					// isEmpty() guard will skip it, yet it is still appended — confirm intended.
					final CompoundTag section = new CompoundTag();
					updateSection(world, chunkX, chunkZ, section, xMin, yMin, zMin, xMax, yMax, zMax);
					sections.add(section);
				}
			}
		}
	}

	/**
	 * Updates the blocks of one section that intersect the requested volume
	 * from the live world into the template section NBT.
	 */
	private void updateSection(final World world, final int chunkX, final int chunkZ,
			final CompoundTag section, final int xMin, final int yMin, final int zMin, final int xMax,
			final int yMax, final int zMax) {
		if (section != null && !section.isEmpty()) {
			final int sectionY = section.getByte("Y");
			// log.info("Processing section: " + sectionY);
			// Determine the included blocks
			final int sectionBlockMin = RegionUtil.getSectionBlockIntersection(sectionY, yMin);
			final int sectionBlockMax = RegionUtil.getSectionBlockIntersection(sectionY, yMax);
			for (int blockY = sectionBlockMin; blockY <= sectionBlockMax; blockY++) {
				// Determine the included blocks
				final int blockXMin = RegionUtil.getChunkBlockIntersection(chunkX, xMin);
				final int blockXMax = RegionUtil.getChunkBlockIntersection(chunkX, xMax);
				final int blockZMin = RegionUtil.getChunkBlockIntersection(chunkZ, zMin);
				final int blockZMax = RegionUtil.getChunkBlockIntersection(chunkZ, zMax);
				// For each block
				for (int blockX = blockXMin; blockX <= blockXMax; blockX++) {
					for (int blockZ = blockZMin; blockZ <= blockZMax; blockZ++) {
						updateBlock(world, section, blockX, blockY, blockZ);
					}
				}
			}
		}
	}

	/**
	 * Copies a single block (type + data) from the live world into the
	 * template section NBT at the given absolute coordinates.
	 */
	private void updateBlock(final World world, final CompoundTag section, final int blockX,
			final int blockY, final int blockZ) {
		if (world != null && section != null && !section.isEmpty()) {
			// log.info("Processing block: " + blockX + ":" + blockY + ":" + blockZ);
			// Determine the relative block location
			final int relX = getChunkRelativeBlockCoordinate(blockX);
			final int relY = getSectionRelativeBlockCoordinate(blockY);
			final int relZ = getChunkRelativeBlockCoordinate(blockZ);
			// Get the block in the target world
			final BlockType block = world.getBlockAt(blockX, blockY, blockZ).getType();
			log.debug("Setting template: " + blockX + ":" + blockY + ":" + blockZ + " to "
					+ block.getId() + ":" + block.getData());
			// Set the block type and data in the section
			final byte[] blocks = section.getByteArray("Blocks");
			final DataLayer dataValues = new DataLayer(section.getByteArray("Data"), 4);
			blocks[relY << 8 | relZ << 4 | relX] = (byte) block.getId();
			dataValues.set(relX, relY, relZ, block.getData());
		}
	}

	/** Resolves the template directory: {@code <templatesDir>/<name>/<name>_<dimension>}. */
	private File getTemplateDir(final String name, final DimensionType type) {
		final File worldDir = new File(templatesDir, name);
		return new File(worldDir, name + "_" + type.getName());
	}

	/**
	 * Opens the template's Anvil region file {@code r.<x>.<z>.mca}, or returns
	 * {@code null} if it does not exist. Callers must close the returned file.
	 */
	private RegionFile loadRegionFile(final String name, final DimensionType type, final int regionX,
			final int regionZ) throws IOException {
		RegionFile region = null;
		final String resourceName = "r." + regionX + "." + regionZ + ".mca";
		final File templateDir = getTemplateDir(name, type);
		final File regionDir = new File(templateDir, "region");
		final File regionFile = new File(regionDir, resourceName);
		if (regionFile.exists()) {
			region = new RegionFile(regionFile);
		}
		return region;
	}
}
package com.facebook.litho; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.support.annotation.AttrRes; import android.support.annotation.ColorInt; import android.support.annotation.DimenRes; import android.support.annotation.Dimension; import android.support.annotation.DrawableRes; import android.support.annotation.Px; import android.support.annotation.StringRes; import android.util.SparseArray; import com.facebook.yoga.YogaAlign; import com.facebook.yoga.YogaBaselineFunction; import com.facebook.yoga.YogaFlexDirection; import com.facebook.yoga.YogaJustify; import com.facebook.yoga.YogaDirection; import com.facebook.yoga.YogaPositionType; import com.facebook.yoga.YogaWrap; import com.facebook.yoga.YogaEdge; import com.facebook.litho.reference.Reference; import com.facebook.yoga.YogaNodeAPI; import static android.support.annotation.Dimension.DP; /** * Class representing an empty InternalNode with a null ComponentLayout. All methods * have been overridden so no actions are performed, and no exceptions are thrown. 
*/ class NoOpInternalNode extends InternalNode { @Override void init(YogaNodeAPI cssNode, ComponentContext componentContext, Resources resources) {} @Override void setComponent(Component component) { } @Px @Override public int getX() { return 0; } @Px @Override public int getY() { return 0; } @Px @Override public int getWidth() { return 0; } @Px @Override public int getHeight() { return 0; } @Px @Override public int getPaddingLeft() { return 0; } @Px @Override public int getPaddingTop() { return 0; } @Px @Override public int getPaddingRight() { return 0; } @Px @Override public int getPaddingBottom() { return 0; } @Override public void setCachedMeasuresValid(boolean valid) {} @Override public int getLastWidthSpec() { return 0; } @Override public void setLastWidthSpec(int widthSpec) {} @Override public int getLastHeightSpec() { return 0; } @Override public void setLastHeightSpec(int heightSpec) {} @Override void setLastMeasuredWidth(float lastMeasuredWidth) {} @Override void setLastMeasuredHeight(float lastMeasuredHeight) {} @Override void setDiffNode(DiffNode diffNode) {} @Override public InternalNode layoutDirection(YogaDirection direction) { return this; } @Override public InternalNode flexDirection(YogaFlexDirection direction) { return this; } @Override public InternalNode wrap(YogaWrap wrap) { return this; } @Override public InternalNode justifyContent(YogaJustify justifyContent) { return this; } @Override public InternalNode alignItems(YogaAlign alignItems) { return this; } @Override public InternalNode alignContent(YogaAlign alignContent) { return this; } @Override public InternalNode alignSelf(YogaAlign alignSelf) { return this; } @Override public InternalNode positionType(YogaPositionType positionType) { return this; } @Override public InternalNode flex(float flex) { return this; } @Override public InternalNode flexGrow(float flexGrow) { return this; } @Override public InternalNode flexShrink(float flexShrink) { return this; } @Override public InternalNode 
flexBasisPx(@Px int flexBasis) { return this; } @Override public InternalNode flexBasisAttr(@AttrRes int resId, @DimenRes int defaultResId) { return this; } @Override public InternalNode flexBasisAttr(@AttrRes int resId) { return this; } @Override public InternalNode flexBasisRes(@DimenRes int resId) { return this; } @Override public InternalNode flexBasisDip(@Dimension(unit = DP) int flexBasis) { return this; } @Override public InternalNode flexBasisPercent(float percent) { return this; } @Override public InternalNode importantForAccessibility(int importantForAccessibility) { return this; } @Override public InternalNode duplicateParentState(boolean duplicateParentState) { return this; } @Override public InternalNode marginPx(YogaEdge edge, @Px int margin) { return this; } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return this; } @Override public InternalNode marginAttr(YogaEdge edge, @AttrRes int resId) { return this; } @Override public InternalNode marginRes(YogaEdge edge, @DimenRes int resId) { return this; } @Override public InternalNode marginDip(YogaEdge edge, @Dimension(unit = DP) int margin) { return this; } @Override public InternalNode marginPercent(YogaEdge edge, float percent) { return this; } @Override public InternalNode marginAuto(YogaEdge edge) { return this; } @Override public InternalNode paddingPx(YogaEdge edge, @Px int padding) {
package org.jenkinsci.plugins.workflow.support.actions;

import hudson.model.Action;
import hudson.model.InvisibleAction;
import org.jenkinsci.plugins.workflow.graph.FlowNode;

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Pause {@link FlowNode} Action.
 * <p/>
 * Simply marks the node as being a node that causes the build to pause e.g. an Input node.
 *
 * @author <a href="mailto:tom.fennelly@gmail.com">tom.fennelly@gmail.com</a>
 */
public class PauseAction extends InvisibleAction {

    private static final Logger LOGGER = Logger.getLogger(PauseAction.class.getName());

    /** Short description of what caused the pause (e.g. the step name). */
    private String cause;

    /** Time (ms since epoch) the pause started; initialized at construction. */
    private long startTime = System.currentTimeMillis();

    /** Time (ms since epoch) the pause ended, or 0L while still paused. */
    private long endTime;

    public PauseAction(String cause) {
        this.cause = cause;
    }

    public String getCause() {
        return cause;
    }

    public void setCause(String cause) {
        this.cause = cause;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public long getEndTime() {
        return endTime;
    }

    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }

    public boolean isPaused() {
        // The node is paused if the end time is not set on it.
        return (endTime == 0L);
    }

    /**
     * Get the pause duration for this flow node.
     * <p/>
     * If the node is paused, the duration will be calculated against the current time.
     *
     * @return The pause duration in milliseconds.
     */
    public long getPauseDuration() {
        if (isPaused()) {
            return (System.currentTimeMillis() - startTime);
        } else {
            return (endTime - startTime);
        }
    }

    /**
     * Get the most recently added {@link PauseAction} on the supplied node.
     *
     * @param node The node to be searched.
     * @return The current (last) pause action, or {@code null} if the node has none.
     */
    public static PauseAction getCurrentPause(FlowNode node) {
        List<PauseAction> pauseActions = getPauseActions(node);

        if (!pauseActions.isEmpty()) {
            // The last action in the list is the most recent pause.
            return pauseActions.get(pauseActions.size() - 1);
        }

        return null;
    }

    /**
     * Mark the supplied node's current pause as ended (sets its end time to now).
     * <p/>
     * If the node has no pause action, a FINE-level message is logged instead.
     *
     * @param node The node whose current pause should be ended.
     */
    public static void endCurrentPause(FlowNode node) {
        PauseAction currentPause = getCurrentPause(node);

        if (currentPause != null) {
            currentPause.setEndTime(System.currentTimeMillis());
        } else {
            // Bug fix: this log statement previously executed unconditionally,
            // reporting "does not have an active pause" even when a pause had
            // just been ended successfully. It only applies to the no-pause case.
            LOGGER.log(Level.FINE, "‘endCurrentPause’ was called for a FlowNode (‘{0}’) that does not have an active pause. ‘endCurrentPause’ may have already been called.", node.getDisplayName());
        }
    }

    /**
     * Simple helper method to test if the supplied node is a pause node.
     * @param node The node to test.
     * @return True if the node is pause node, otherwise false.
     */
    public static boolean isPaused(FlowNode node) {
        PauseAction currentPause = getCurrentPause(node);

        if (currentPause != null) {
            return currentPause.isPaused();
        }

        return false;
    }

    /**
     * Get the {@link PauseAction} instances for the supplied node.
     * @param node The node to be searched.
     * @return The {@link PauseAction} instances for the supplied node. Returns an empty list if there are none.
     */
    public static List<PauseAction> getPauseActions(FlowNode node) {
        List<PauseAction> pauseActions = new ArrayList<PauseAction>();
        List<Action> actions = node.getActions();

        for (Action action : actions) {
            if (action instanceof PauseAction) {
                pauseActions.add((PauseAction) action);
            }
        }

        return pauseActions;
    }

    /**
     * Get the aggregate pause duration of the supplied flow node.
     * @param node The node to calculate on.
     * @return The pause duration in milliseconds.
     */
    public static long getPauseDuration(FlowNode node) {
        List<PauseAction> pauseActions = getPauseActions(node);
        long pauseDuration = 0L;

        for (PauseAction pauseAction : pauseActions) {
            pauseDuration += pauseAction.getPauseDuration();
        }

        return pauseDuration;
    }
}
package com.faforever.api.data.domain;

import com.faforever.api.data.checks.BooleanChange;
import com.faforever.api.data.checks.IsEntityOwner;
import com.faforever.api.data.checks.permission.IsModerator;
import com.faforever.api.data.listeners.MapVersionEnricher;
import com.yahoo.elide.annotation.Audit;
import com.yahoo.elide.annotation.Audit.Action;
import com.yahoo.elide.annotation.ComputedAttribute;
import com.yahoo.elide.annotation.Include;
import com.yahoo.elide.annotation.UpdatePermission;
import lombok.Setter;
import org.hibernate.annotations.BatchSize;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import java.util.List;

/**
 * JPA entity for one version of a map (table {@code map_version}), exposed through
 * Elide as type {@code mapVersion}. Setters are Lombok-generated ({@code @Setter});
 * the transient thumbnail/download/folder attributes are filled in by
 * {@link MapVersionEnricher} and exposed as Elide computed attributes.
 */
@Entity
@Setter
@EntityListeners(MapVersionEnricher.class)
@Table(name = "map_version")
@Include(rootLevel = true, type = MapVersion.TYPE_NAME)
public class MapVersion extends AbstractEntity implements OwnableEntity {
  // Elide JSON-API type name for this entity.
  public static final String TYPE_NAME = "mapVersion";

  private String description;
  private int maxPlayers;
  private int width;
  private int height;
  private int version;
  private String filename;
  private String folderName;
  private boolean ranked;
  private boolean hidden;
  private Map map;
  private MapVersionStatistics statistics;
  // Populated by MapVersionEnricher, not persisted.
  private String thumbnailUrlSmall;
  private String thumbnailUrlLarge;
  private String downloadUrl;
  private List<MapVersionReview> reviews;
  private MapVersionReviewsSummary reviewsSummary;
  // NOTE(review): no getter for this field is declared in this class, so it is
  // write-only here (Lombok @Setter only) — confirm this is intended.
  private Ladder1v1Map ladder1v1Map;

  // Editable by the map's owner or a moderator.
  @UpdatePermission(expression = IsEntityOwner.EXPRESSION + " or " + IsModerator.EXPRESSION)
  @Column(name = "description")
  public String getDescription() {
    return description;
  }

  @Column(name = "max_players")
  @NotNull
  public int getMaxPlayers() {
    return maxPlayers;
  }

  @Column(name = "width")
  // FIXME: validation
  public int getWidth() {
    return width;
  }

  @Column(name = "height")
  // FIXME: validation
  public int getHeight() {
    return height;
  }

  @Column(name = "version")
  // FIXME: validation
  public int getVersion() {
    return version;
  }

  @Column(name = "filename")
  @NotNull
  public String getFilename() {
    return filename;
  }

  // Moderators may set ranked freely; the owner may only clear it (change to false).
  @UpdatePermission(expression = IsModerator.EXPRESSION + " or (" + IsEntityOwner.EXPRESSION + " and " + BooleanChange.TO_FALSE_EXPRESSION + ")")
  @Audit(action = Action.UPDATE, logStatement = "Updated map version `{0}` attribute ranked to: {1}", logExpressions = {"${mapVersion.id}", "${mapVersion.ranked}"})
  @Column(name = "ranked")
  public boolean isRanked() {
    return ranked;
  }

  // Moderators may set hidden freely; the owner may only set it (change to true).
  @UpdatePermission(expression = IsModerator.EXPRESSION + " or (" + IsEntityOwner.EXPRESSION + " and " + BooleanChange.TO_TRUE_EXPRESSION + ")")
  @Audit(action = Action.UPDATE, logStatement = "Updated map version `{0}` attribute hidden to: {1}", logExpressions = {"${mapVersion.id}", "${mapVersion.hidden}"})
  @Column(name = "hidden")
  public boolean isHidden() {
    return hidden;
  }

  // Owning map; batched to avoid N+1 loads when listing many versions.
  @ManyToOne(fetch = FetchType.LAZY)
  @JoinColumn(name = "map_id")
  @NotNull
  @BatchSize(size = 1000)
  public Map getMap() {
    return this.map;
  }

  @OneToOne(mappedBy = "mapVersion", fetch = FetchType.EAGER)
  public MapVersionStatistics getStatistics() {
    return statistics;
  }

  // Computed (non-persistent) attribute, set by MapVersionEnricher.
  @Transient
  @ComputedAttribute
  public String getThumbnailUrlSmall() {
    return thumbnailUrlSmall;
  }

  @Transient
  @ComputedAttribute
  public String getThumbnailUrlLarge() {
    return thumbnailUrlLarge;
  }

  @Transient
  @ComputedAttribute
  public String getDownloadUrl() {
    return downloadUrl;
  }

  @Transient
  @ComputedAttribute
  public String getFolderName() {
    return folderName;
  }

  @OneToMany(mappedBy = "mapVersion")
  @UpdatePermission(expression = "Prefab.Role.All")
  public List<MapVersionReview> getReviews() {
    return reviews;
  }

  @OneToOne(mappedBy = "mapVersion")
  @UpdatePermission(expression = "Prefab.Role.All")
  public MapVersionReviewsSummary getReviewsSummary() {
    return reviewsSummary;
  }

  // OwnableEntity contract: a map version is owned by its map's author.
  @Transient
  @Override
  public Login getEntityOwner() {
    return map.getAuthor();
  }
}
package org.openlmis.functional;

import org.openlmis.UiUtils.CaptureScreenshotOnFailureListener;
import org.openlmis.UiUtils.TestCaseHelper;
import org.openlmis.pageobjects.HomePage;
import org.openlmis.pageobjects.InitiateRnRPage;
import org.openlmis.pageobjects.LoginPage;
import org.springframework.test.context.transaction.TransactionConfiguration;
import org.springframework.transaction.annotation.Transactional;
import org.testng.annotations.*;

import java.util.ArrayList;
import java.util.List;

/**
 * Functional (Selenium/TestNG) tests for rights management: verifies the menu
 * items and R&R workflow available to a user with CREATE/VIEW requisition rights.
 */
@TransactionConfiguration(defaultRollback = true)
@Transactional
@Listeners(CaptureScreenshotOnFailureListener.class)
public class ManageRights extends TestCaseHelper {

  @BeforeMethod(groups = {"admin"})
  public void setUp() throws Exception {
    super.setup();
  }

  /**
   * A user with only CREATE_REQUISITION and VIEW_REQUISITION rights should see
   * the "Create / Authorize" and "View" sub-menus, be able to submit an R&R,
   * and must NOT see the Authorize button.
   */
  @Test(groups = {"admin"}, dataProvider = "Data-Provider-Function-Positive")
  public void testOnlyCreateRight(String program, String userSIC, String password) throws Exception {
    List<String> rightsList = new ArrayList<String>();
    rightsList.add("CREATE_REQUISITION");
    rightsList.add("VIEW_REQUISITION");
    setupTestDataToInitiateRnR(true, program, userSIC, "200", "openLmis", rightsList);

    LoginPage loginPage = new LoginPage(testWebDriver, baseUrlGlobal);
    HomePage homePage = loginPage.loginAs(userSIC, password);

    String[] expectedMenuItem = {"Create / Authorize", "View"};
    homePage.verifySubMenuItems(expectedMenuItem);

    // Fix: the returned period-details string was previously stored in an
    // unused local; the call is kept for its navigation side effect.
    homePage.navigateAndInitiateRnr(program);
    InitiateRnRPage initiateRnRPage = homePage.clickProceed();

    initiateRnRPage.enterBeginningBalance("10");
    initiateRnRPage.enterQuantityDispensed("10");
    initiateRnRPage.enterQuantityReceived("10");
    initiateRnRPage.submitRnR();
    // The user lacks AUTHORIZE_REQUISITION, so no Authorize button may appear.
    initiateRnRPage.verifyAuthorizeButtonNotPresent();
  }

  @AfterMethod(groups = {"admin"})
  public void tearDown() throws Exception {
    HomePage homePage = new HomePage(testWebDriver);
    homePage.logout(baseUrlGlobal);
    dbWrapper.deleteData();
    dbWrapper.closeConnection();
  }

  /** Supplies (program, user, password) tuples for the positive test case. */
  @DataProvider(name = "Data-Provider-Function-Positive")
  public Object[][] parameterIntTestProviderPositive() {
    return new Object[][]{
        {"HIV", "storeincharge", "Admin123"}
    };
  }
}