package com.redhat.ceylon.compiler.js;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.util.Arrays;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

/** To run this you need to execute the nodetest ant target first. */
public class RunJsTest {

    static File tmpModules;

    @BeforeClass
    public static void setup() throws IOException {
        tmpModules = File.createTempFile("ceylon", "runjstest");
        tmpModules.delete();
        tmpModules.mkdir();
        File sub = new File(tmpModules, "check/0.1");
        sub.mkdirs();
        File src = new File("build/test/proto/check/0.1");
        for (File f : src.listFiles()) {
            Files.copy(f.toPath(), new File(sub, f.getName()).toPath());
        }
    }

    @Test
    public void testModuleLoading() throws Exception {
        CeylonRunJsTool runner = new CeylonRunJsTool();
        runner.setModuleVersion("misc/0.1");
        runner.setRun("test");
        runner.setRepository(Arrays.asList(new URI(tmpModules.getAbsolutePath()),
                new URI("build/runtime"), new URI("build/test/proto")));
        runner.run();
    }

    @AfterClass
    public static void cleanup() {
        tmpModules.delete();
    }

    @Test
    public void testResources() throws Exception {
        // Compile a module with resources
        CeylonCompileJsTool compiler = new CeylonCompileJsTool();
        compiler.setRepositoryAsStrings(Arrays.asList("build/runtime"));
        compiler.setSource(Arrays.asList(new File("src/test/resources/doc/highers.ceylon")));
        compiler.setSkipSrcArchive(true);
        compiler.setResource(Arrays.asList(new File("src/test/resources/res_test")));
        compiler.run();
        // Run it, just to make sure the resources were exploded
        CeylonRunJsTool runner = new CeylonRunJsTool();
        runner.setModuleVersion("default");
        runner.setRun("run");
        runner.setRepositoryAsStrings(Arrays.asList("build/runtime", "modules"));
        runner.run();
        Assert.assertTrue("test.txt is missing", new File("modules/default/test.txt").exists());
        Assert.assertTrue("another_test.txt is missing", new File("modules/default/another_test.txt").exists());
        Assert.assertTrue("third.txt is missing", new File("modules/default/subdir/third.txt").exists());
    }
}
package org.joval.scap.xccdf.engine; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import scap.oval.common.ClassEnumeration; import scap.oval.common.GeneratorType; import scap.oval.definitions.core.OvalDefinitions; import scap.oval.results.ResultEnumeration; import scap.oval.results.DefinitionType; import scap.oval.variables.VariableType; import scap.xccdf.CcOperatorEnumType; import scap.xccdf.CheckContentRefType; import scap.xccdf.CheckType; import scap.xccdf.CheckExportType; import scap.xccdf.InstanceResultType; import scap.xccdf.ObjectFactory; import scap.xccdf.ResultEnumType; import org.joval.intf.scap.IScapContext; import org.joval.intf.scap.oval.IDefinitionFilter; import org.joval.intf.scap.oval.IDefinitions; import org.joval.intf.scap.oval.IEngine; import org.joval.intf.scap.oval.IResults; import org.joval.intf.scap.oval.IVariables; import org.joval.intf.scap.xccdf.SystemEnumeration; import org.joval.intf.scap.xccdf.IEngine.Message; import org.joval.intf.plugin.IPlugin; import org.joval.intf.xml.ITransformable; import org.joval.scap.oval.OvalException; import org.joval.scap.oval.OvalFactory; import org.joval.scap.xccdf.XccdfException; import org.joval.util.Producer; /** * XCCDF helper class for OVAL processing. * * @author David A. Solin * @version %I% %G% */ public class OvalHandler implements ISystem { private static final String NAMESPACE = SystemEnumeration.OVAL.namespace(); private Map<String, EngineData> engines; private IScapContext ctx; private Producer<Message> producer; public OvalHandler(IScapContext ctx, Producer<Message> producer) { this.ctx = ctx; this.producer = producer; engines = new HashMap<String, EngineData>(); } // Implement ISystem public String getNamespace() { return NAMESPACE; } public void add(CheckType check) throws Exception { if (!NAMESPACE.equals(check.getSystem())) { throw new IllegalArgumentException(check.getSystem()); } if (check.isSetCheckContent()) { // TBD (DAS): inline content is not supported } for (CheckContentRefType ref : check.getCheckContentRef()) { String href = ref.getHref(); EngineData ed = null; if (engines.containsKey(href)) { ed = engines.get(href); } else { try { ed = new EngineData(ctx.getOval(href)); engines.put(href, ed); } catch (NoSuchElementException e) { continue; } } // Add definition references to the filter if (ref.isSetName()) { ed.getFilter().addDefinition(ref.getName()); } else { // Add all the definitions IDefinitions definitions = ctx.getOval(href); for (scap.oval.definitions.core.DefinitionType definition : definitions.getOvalDefinitions().getDefinitions().getDefinition()) { ed.getFilter().addDefinition(definition.getId()); } } // Add variable exports to the variables for (CheckExportType export : check.getCheckExport()) { String ovalVariableId = export.getExportName(); String valueId = export.getValueId(); for (String s : ctx.getValues().get(valueId)) { ed.getVariables().addValue(ovalVariableId, s); } ed.getVariables().setComment(ovalVariableId, valueId); } } } public Collection<ITransformable> exec(IPlugin plugin) throws Exception { Collection<ITransformable> reports = new ArrayList<ITransformable>(); Iterator<Map.Entry<String, EngineData>> iter = engines.entrySet().iterator(); while(iter.hasNext()) { Map.Entry<String, EngineData> entry = iter.next(); if (entry.getValue().createEngine(plugin)) { 
plugin.getLogger().info("Created engine for href " + entry.getKey()); IEngine engine = entry.getValue().getEngine(); producer.sendNotify(Message.OVAL_ENGINE, engine); engine.run(); switch(engine.getResult()) { case OK: reports.add(engine.getResults()); break; case ERR: throw engine.getError(); } } else { plugin.getLogger().info("No engine created for href " + entry.getKey()); iter.remove(); } } return reports; } public IResult getResult(CheckType check, boolean multi) throws Exception { if (!NAMESPACE.equals(check.getSystem())) { throw new IllegalArgumentException(check.getSystem()); } for (CheckContentRefType ref : check.getCheckContentRef()) { if (engines.containsKey(ref.getHref())) { CheckData data = new CheckData(check.getNegate()); IResults ovalResult = engines.get(ref.getHref()).getEngine().getResults(); if (ref.isSetName()) { try { String definitionId = ref.getName(); ClassEnumeration definitionClass = ovalResult.getDefinition(definitionId).getClazz(); ResultEnumeration definitionResult = ovalResult.getDefinitionResult(definitionId); data.add(convertResult(definitionClass, definitionResult)); } catch (NoSuchElementException e) { data.add(ResultEnumType.UNKNOWN); } } else if (multi) { CheckResult cr = new CheckResult(); for (DefinitionType def : ovalResult.getDefinitionResults()) { data = new CheckData(check.getNegate()); String definitionId = def.getDefinitionId(); ClassEnumeration definitionClass = ovalResult.getDefinition(definitionId).getClazz(); ResultEnumeration definitionResult = ovalResult.getDefinitionResult(definitionId); data.add(convertResult(definitionClass, definitionResult)); InstanceResultType inst = Engine.FACTORY.createInstanceResultType(); inst.setValue(def.getDefinitionId()); cr.getResults().add(new CheckResult(data.getResult(CcOperatorEnumType.AND), check, inst)); } return cr; } else { for (DefinitionType def : ovalResult.getDefinitionResults()) { String definitionId = def.getDefinitionId(); ClassEnumeration definitionClass = def.getClazz(); ResultEnumeration definitionResult = ovalResult.getDefinitionResult(definitionId); data.add(convertResult(definitionClass, definitionResult)); } } return new CheckResult(data.getResult(CcOperatorEnumType.AND), check); } } return new CheckResult(ResultEnumType.NOTCHECKED, check); } // Private /** * Map an OVAL result to an XCCDF result. 
* * @see the SCAP specification document, Section 4.5.2: Mapping OVAL Results to XCCDF Results * */ private ResultEnumType convertResult(ClassEnumeration ce, ResultEnumeration re) { switch (re) { case ERROR: return ResultEnumType.ERROR; case FALSE: switch(ce) { case VULNERABILITY: case PATCH: return ResultEnumType.PASS; case COMPLIANCE: case INVENTORY: case MISCELLANEOUS: default: return ResultEnumType.FAIL; } case TRUE: switch(ce) { case VULNERABILITY: case PATCH: return ResultEnumType.FAIL; case COMPLIANCE: case INVENTORY: case MISCELLANEOUS: default: return ResultEnumType.PASS; } case NOT_APPLICABLE: return ResultEnumType.NOTAPPLICABLE; case NOT_EVALUATED: return ResultEnumType.NOTCHECKED; case UNKNOWN: default: return ResultEnumType.UNKNOWN; } } class EngineData { private IDefinitions definitions; private IDefinitionFilter filter; private IVariables variables; private IEngine engine; EngineData(IDefinitions definitions) { this.definitions = definitions; filter = OvalFactory.createDefinitionFilter(); variables = OvalFactory.createVariables(); } IDefinitionFilter getFilter() { return filter; } IVariables getVariables() { return variables; } boolean createEngine(IPlugin plugin) { if (filter.size() > 0) { engine = OvalFactory.createEngine(IEngine.Mode.DIRECTED, plugin); engine.setDefinitions(definitions); engine.setExternalVariables(variables); engine.setDefinitionFilter(filter); return true; } else { return false; } } IEngine getEngine() { return engine; } } }
package org.onlab.onos.store.link.impl; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.FluentIterable; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.common.collect.SetMultimap; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.concurrent.ConcurrentUtils; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.onos.cluster.ClusterService; import org.onlab.onos.cluster.ControllerNode; import org.onlab.onos.cluster.NodeId; import org.onlab.onos.net.AnnotationsUtil; import org.onlab.onos.net.ConnectPoint; import org.onlab.onos.net.DefaultAnnotations; import org.onlab.onos.net.DefaultLink; import org.onlab.onos.net.DeviceId; import org.onlab.onos.net.Link; import org.onlab.onos.net.SparseAnnotations; import org.onlab.onos.net.Link.Type; import org.onlab.onos.net.LinkKey; import org.onlab.onos.net.Provided; import org.onlab.onos.net.link.DefaultLinkDescription; import org.onlab.onos.net.link.LinkDescription; import org.onlab.onos.net.link.LinkEvent; import org.onlab.onos.net.link.LinkStore; import org.onlab.onos.net.link.LinkStoreDelegate; import org.onlab.onos.net.provider.ProviderId; import org.onlab.onos.store.AbstractStore; import org.onlab.onos.store.ClockService; import org.onlab.onos.store.Timestamp; import org.onlab.onos.store.cluster.messaging.ClusterCommunicationService; import org.onlab.onos.store.cluster.messaging.ClusterMessage; import org.onlab.onos.store.cluster.messaging.ClusterMessageHandler; import org.onlab.onos.store.cluster.messaging.MessageSubject; import org.onlab.onos.store.common.impl.Timestamped; import org.onlab.onos.store.serializers.DistributedStoreSerializers; import org.onlab.onos.store.serializers.KryoSerializer; import org.onlab.util.KryoPool; import org.onlab.util.NewConcurrentHashMap; import org.slf4j.Logger; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; import static org.onlab.onos.cluster.ControllerNodeToNodeId.toNodeId; import static org.onlab.onos.net.DefaultAnnotations.union; import static org.onlab.onos.net.DefaultAnnotations.merge; import static org.onlab.onos.net.Link.Type.DIRECT; import static org.onlab.onos.net.Link.Type.INDIRECT; import static org.onlab.onos.net.link.LinkEvent.Type.*; import static org.onlab.util.Tools.namedThreads; import static org.slf4j.LoggerFactory.getLogger; import static com.google.common.collect.Multimaps.synchronizedSetMultimap; import static com.google.common.base.Predicates.notNull; /** * Manages inventory of infrastructure links in distributed data store * that uses optimistic replication and gossip based techniques. 
*/ @Component(immediate = true) @Service public class GossipLinkStore extends AbstractStore<LinkEvent, LinkStoreDelegate> implements LinkStore { private final Logger log = getLogger(getClass()); // Link inventory private final ConcurrentMap<LinkKey, ConcurrentMap<ProviderId, Timestamped<LinkDescription>>> linkDescs = new ConcurrentHashMap<>(); // Link instance cache private final ConcurrentMap<LinkKey, Link> links = new ConcurrentHashMap<>(); // Egress and ingress link sets private final SetMultimap<DeviceId, LinkKey> srcLinks = createSynchronizedHashMultiMap(); private final SetMultimap<DeviceId, LinkKey> dstLinks = createSynchronizedHashMultiMap(); // Remove links private final Map<LinkKey, Timestamp> removedLinks = Maps.newHashMap(); @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ClockService clockService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ClusterCommunicationService clusterCommunicator; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ClusterService clusterService; private static final KryoSerializer SERIALIZER = new KryoSerializer() { @Override protected void setupKryoPool() { serializerPool = KryoPool.newBuilder() .register(DistributedStoreSerializers.COMMON) .register(InternalLinkEvent.class) .register(InternalLinkRemovedEvent.class) .register(LinkAntiEntropyAdvertisement.class) .register(LinkFragmentId.class) .build() .populate(1); } }; private ScheduledExecutorService executor; @Activate public void activate() { clusterCommunicator.addSubscriber( GossipLinkStoreMessageSubjects.LINK_UPDATE, new InternalLinkEventListener()); clusterCommunicator.addSubscriber( GossipLinkStoreMessageSubjects.LINK_REMOVED, new InternalLinkRemovedEventListener()); clusterCommunicator.addSubscriber( GossipLinkStoreMessageSubjects.LINK_ANTI_ENTROPY_ADVERTISEMENT, new InternalLinkAntiEntropyAdvertisementListener()); executor = newSingleThreadScheduledExecutor(namedThreads("link-anti-entropy-%d")); // TODO: Make these configurable long initialDelaySec = 5; long periodSec = 5; // start anti-entropy thread executor.scheduleAtFixedRate(new SendAdvertisementTask(), initialDelaySec, periodSec, TimeUnit.SECONDS); log.info("Started"); } @Deactivate public void deactivate() { linkDescs.clear(); links.clear(); srcLinks.clear(); dstLinks.clear(); log.info("Stopped"); } @Override public int getLinkCount() { return links.size(); } @Override public Iterable<Link> getLinks() { return Collections.unmodifiableCollection(links.values()); } @Override public Set<Link> getDeviceEgressLinks(DeviceId deviceId) { // lock for iteration synchronized (srcLinks) { return FluentIterable.from(srcLinks.get(deviceId)) .transform(lookupLink()) .filter(notNull()) .toSet(); } } @Override public Set<Link> getDeviceIngressLinks(DeviceId deviceId) { // lock for iteration synchronized (dstLinks) { return FluentIterable.from(dstLinks.get(deviceId)) .transform(lookupLink()) .filter(notNull()) .toSet(); } } @Override public Link getLink(ConnectPoint src, ConnectPoint dst) { return links.get(new LinkKey(src, dst)); } @Override public Set<Link> getEgressLinks(ConnectPoint src) { Set<Link> egress = new HashSet<>(); for (LinkKey linkKey : srcLinks.get(src.deviceId())) { if (linkKey.src().equals(src)) { egress.add(links.get(linkKey)); } } return egress; } @Override public Set<Link> getIngressLinks(ConnectPoint dst) { Set<Link> ingress = new HashSet<>(); for (LinkKey linkKey : dstLinks.get(dst.deviceId())) { if (linkKey.dst().equals(dst)) { 
ingress.add(links.get(linkKey)); } } return ingress; } @Override public LinkEvent createOrUpdateLink(ProviderId providerId, LinkDescription linkDescription) { DeviceId dstDeviceId = linkDescription.dst().deviceId(); Timestamp newTimestamp = clockService.getTimestamp(dstDeviceId); final Timestamped<LinkDescription> deltaDesc = new Timestamped<>(linkDescription, newTimestamp); LinkEvent event = createOrUpdateLinkInternal(providerId, deltaDesc); if (event != null) { log.info("Notifying peers of a link update topology event from providerId: " + "{} between src: {} and dst: {}", providerId, linkDescription.src(), linkDescription.dst()); try { notifyPeers(new InternalLinkEvent(providerId, deltaDesc)); } catch (IOException e) { log.info("Failed to notify peers of a link update topology event from providerId: " + "{} between src: {} and dst: {}", providerId, linkDescription.src(), linkDescription.dst()); } } return event; } private LinkEvent createOrUpdateLinkInternal( ProviderId providerId, Timestamped<LinkDescription> linkDescription) { LinkKey key = new LinkKey(linkDescription.value().src(), linkDescription.value().dst()); ConcurrentMap<ProviderId, Timestamped<LinkDescription>> descs = getLinkDescriptions(key); synchronized (descs) { // if the link was previously removed, we should proceed if and // only if this request is more recent. Timestamp linkRemovedTimestamp = removedLinks.get(key); if (linkRemovedTimestamp != null) { if (linkDescription.isNewer(linkRemovedTimestamp)) { removedLinks.remove(key); } else { return null; } } final Link oldLink = links.get(key); // update description createOrUpdateLinkDescription(descs, providerId, linkDescription); final Link newLink = composeLink(descs); if (oldLink == null) { return createLink(key, newLink); } return updateLink(key, oldLink, newLink); } } // Guarded by linkDescs value (=locking each Link) private Timestamped<LinkDescription> createOrUpdateLinkDescription( ConcurrentMap<ProviderId, Timestamped<LinkDescription>> existingLinkDescriptions, ProviderId providerId, Timestamped<LinkDescription> linkDescription) { // merge existing attributes and merge Timestamped<LinkDescription> existingLinkDescription = existingLinkDescriptions.get(providerId); if (existingLinkDescription != null && existingLinkDescription.isNewer(linkDescription)) { return null; } Timestamped<LinkDescription> newLinkDescription = linkDescription; if (existingLinkDescription != null) { SparseAnnotations merged = union(existingLinkDescription.value().annotations(), linkDescription.value().annotations()); newLinkDescription = new Timestamped<LinkDescription>( new DefaultLinkDescription( linkDescription.value().src(), linkDescription.value().dst(), linkDescription.value().type(), merged), linkDescription.timestamp()); } return existingLinkDescriptions.put(providerId, newLinkDescription); } // Creates and stores the link and returns the appropriate event. // Guarded by linkDescs value (=locking each Link) private LinkEvent createLink(LinkKey key, Link newLink) { if (newLink.providerId().isAncillary()) { // TODO: revisit ancillary only Link handling // currently treating ancillary only as down (not visible outside) return null; } links.put(key, newLink); srcLinks.put(newLink.src().deviceId(), key); dstLinks.put(newLink.dst().deviceId(), key); return new LinkEvent(LINK_ADDED, newLink); } // Updates, if necessary the specified link and returns the appropriate event. 
// Guarded by linkDescs value (=locking each Link) private LinkEvent updateLink(LinkKey key, Link oldLink, Link newLink) { if (newLink.providerId().isAncillary()) { // TODO: revisit ancillary only Link handling // currently treating ancillary only as down (not visible outside) return null; } if ((oldLink.type() == INDIRECT && newLink.type() == DIRECT) || !AnnotationsUtil.isEqual(oldLink.annotations(), newLink.annotations())) { links.put(key, newLink); // strictly speaking following can be ommitted srcLinks.put(oldLink.src().deviceId(), key); dstLinks.put(oldLink.dst().deviceId(), key); return new LinkEvent(LINK_UPDATED, newLink); } return null; } @Override public LinkEvent removeLink(ConnectPoint src, ConnectPoint dst) { final LinkKey key = new LinkKey(src, dst); DeviceId dstDeviceId = dst.deviceId(); Timestamp timestamp = clockService.getTimestamp(dstDeviceId); LinkEvent event = removeLinkInternal(key, timestamp); if (event != null) { log.info("Notifying peers of a link removed topology event for a link " + "between src: {} and dst: {}", src, dst); try { notifyPeers(new InternalLinkRemovedEvent(key, timestamp)); } catch (IOException e) { log.error("Failed to notify peers of a link removed topology event for a link " + "between src: {} and dst: {}", src, dst); } } return event; } private LinkEvent removeLinkInternal(LinkKey key, Timestamp timestamp) { ConcurrentMap<ProviderId, Timestamped<LinkDescription>> linkDescriptions = getLinkDescriptions(key); synchronized (linkDescriptions) { // accept removal request if given timestamp is newer than // the latest Timestamp from Primary provider ProviderId primaryProviderId = pickPrimaryProviderId(linkDescriptions); if (linkDescriptions.get(primaryProviderId).isNewer(timestamp)) { return null; } removedLinks.put(key, timestamp); Link link = links.remove(key); linkDescriptions.clear(); if (link != null) { srcLinks.remove(link.src().deviceId(), key); dstLinks.remove(link.dst().deviceId(), key); return new LinkEvent(LINK_REMOVED, link); } return null; } } private static <K, V> SetMultimap<K, V> createSynchronizedHashMultiMap() { return synchronizedSetMultimap(HashMultimap.<K, V>create()); } /** * @return primary ProviderID, or randomly chosen one if none exists */ private ProviderId pickPrimaryProviderId( ConcurrentMap<ProviderId, Timestamped<LinkDescription>> providerDescs) { ProviderId fallBackPrimary = null; for (Entry<ProviderId, Timestamped<LinkDescription>> e : providerDescs.entrySet()) { if (!e.getKey().isAncillary()) { return e.getKey(); } else if (fallBackPrimary == null) { // pick randomly as a fallback in case there is no primary fallBackPrimary = e.getKey(); } } return fallBackPrimary; } private Link composeLink(ConcurrentMap<ProviderId, Timestamped<LinkDescription>> linkDescriptions) { ProviderId primaryProviderId = pickPrimaryProviderId(linkDescriptions); Timestamped<LinkDescription> base = linkDescriptions.get(primaryProviderId); ConnectPoint src = base.value().src(); ConnectPoint dst = base.value().dst(); Type type = base.value().type(); DefaultAnnotations annotations = DefaultAnnotations.builder().build(); annotations = merge(annotations, base.value().annotations()); for (Entry<ProviderId, Timestamped<LinkDescription>> e : linkDescriptions.entrySet()) { if (primaryProviderId.equals(e.getKey())) { continue; } // TODO: should keep track of Description timestamp // and only merge conflicting keys when timestamp is newer // Currently assuming there will never be a key conflict between // providers // annotation merging. 
not so efficient, should revisit later annotations = merge(annotations, e.getValue().value().annotations()); } return new DefaultLink(primaryProviderId , src, dst, type, annotations); } private ConcurrentMap<ProviderId, Timestamped<LinkDescription>> getLinkDescriptions(LinkKey key) { return ConcurrentUtils.createIfAbsentUnchecked(linkDescs, key, NewConcurrentHashMap.<ProviderId, Timestamped<LinkDescription>>ifNeeded()); } private Timestamped<LinkDescription> getLinkDescription(LinkKey key, ProviderId providerId) { return getLinkDescriptions(key).get(providerId); } private final Function<LinkKey, Link> lookupLink = new LookupLink(); private Function<LinkKey, Link> lookupLink() { return lookupLink; } private final class LookupLink implements Function<LinkKey, Link> { @Override public Link apply(LinkKey input) { return links.get(input); } } private static final Predicate<Provided> IS_PRIMARY = new IsPrimary(); private static final Predicate<Provided> isPrimary() { return IS_PRIMARY; } private static final class IsPrimary implements Predicate<Provided> { @Override public boolean apply(Provided input) { return !input.providerId().isAncillary(); } } private void notifyDelegateIfNotNull(LinkEvent event) { if (event != null) { notifyDelegate(event); } } // TODO: should we be throwing exception? private void broadcastMessage(MessageSubject subject, Object event) throws IOException { ClusterMessage message = new ClusterMessage( clusterService.getLocalNode().id(), subject, SERIALIZER.encode(event)); clusterCommunicator.broadcast(message); } // TODO: should we be throwing exception? private void unicastMessage(NodeId recipient, MessageSubject subject, Object event) { try { ClusterMessage message = new ClusterMessage( clusterService.getLocalNode().id(), subject, SERIALIZER.encode(event)); clusterCommunicator.unicast(message, recipient); } catch (IOException e) { log.error("Failed to send a {} message to {}", subject.value(), recipient); } } private void notifyPeers(InternalLinkEvent event) throws IOException { broadcastMessage(GossipLinkStoreMessageSubjects.LINK_UPDATE, event); } private void notifyPeers(InternalLinkRemovedEvent event) throws IOException { broadcastMessage(GossipLinkStoreMessageSubjects.LINK_REMOVED, event); } private void notifyPeer(NodeId peer, InternalLinkEvent event) { unicastMessage(peer, GossipLinkStoreMessageSubjects.LINK_UPDATE, event); } private void notifyPeer(NodeId peer, InternalLinkRemovedEvent event) { unicastMessage(peer, GossipLinkStoreMessageSubjects.LINK_REMOVED, event); } private final class SendAdvertisementTask implements Runnable { @Override public void run() { if (Thread.currentThread().isInterrupted()) { log.info("Interrupted, quitting"); return; } try { final NodeId self = clusterService.getLocalNode().id(); Set<ControllerNode> nodes = clusterService.getNodes(); ImmutableList<NodeId> nodeIds = FluentIterable.from(nodes) .transform(toNodeId()) .toList(); if (nodeIds.size() == 1 && nodeIds.get(0).equals(self)) { log.info("No other peers in the cluster."); return; } NodeId peer; do { int idx = RandomUtils.nextInt(0, nodeIds.size()); peer = nodeIds.get(idx); } while (peer.equals(self)); LinkAntiEntropyAdvertisement ad = createAdvertisement(); if (Thread.currentThread().isInterrupted()) { log.info("Interrupted, quitting"); return; } try { unicastMessage(peer, GossipLinkStoreMessageSubjects.LINK_ANTI_ENTROPY_ADVERTISEMENT, ad); } catch (Exception e) { log.error("Failed to send anti-entropy advertisement", e); return; } } catch (Exception e) { // catch all Exception 
to avoid Scheduled task being suppressed. log.error("Exception thrown while sending advertisement", e); } } } private LinkAntiEntropyAdvertisement createAdvertisement() { final NodeId self = clusterService.getLocalNode().id(); Map<LinkFragmentId, Timestamp> linkTimestamps = new HashMap<>(linkDescs.size()); Map<LinkKey, Timestamp> linkTombstones = new HashMap<>(removedLinks.size()); for (Entry<LinkKey, ConcurrentMap<ProviderId, Timestamped<LinkDescription>>> provs : linkDescs.entrySet()) { final LinkKey linkKey = provs.getKey(); final ConcurrentMap<ProviderId, Timestamped<LinkDescription>> linkDesc = provs.getValue(); synchronized (linkDesc) { for (Map.Entry<ProviderId, Timestamped<LinkDescription>> e : linkDesc.entrySet()) { linkTimestamps.put(new LinkFragmentId(linkKey, e.getKey()), e.getValue().timestamp()); } } } linkTombstones.putAll(removedLinks); return new LinkAntiEntropyAdvertisement(self, linkTimestamps, linkTombstones); } private void handleAntiEntropyAdvertisement(LinkAntiEntropyAdvertisement advertisement) { NodeId peer = advertisement.sender(); Map<LinkFragmentId, Timestamp> linkTimestamps = advertisement.linkTimestamps(); Map<LinkKey, Timestamp> linkTombstones = advertisement.linkTombstones(); for (Map.Entry<LinkFragmentId, Timestamp> entry : linkTimestamps.entrySet()) { LinkFragmentId linkFragmentId = entry.getKey(); Timestamp peerTimestamp = entry.getValue(); LinkKey key = linkFragmentId.linkKey(); ProviderId providerId = linkFragmentId.providerId(); Timestamped<LinkDescription> linkDescription = getLinkDescription(key, providerId); if (linkDescription.isNewer(peerTimestamp)) { // I have more recent link description. update peer. notifyPeer(peer, new InternalLinkEvent(providerId, linkDescription)); } // else TODO: Peer has more recent link description. request it. Timestamp linkRemovedTimestamp = removedLinks.get(key); if (linkRemovedTimestamp != null && linkRemovedTimestamp.compareTo(peerTimestamp) > 0) { // peer has a zombie link. update peer. 
notifyPeer(peer, new InternalLinkRemovedEvent(key, linkRemovedTimestamp)); } } for (Map.Entry<LinkKey, Timestamp> entry : linkTombstones.entrySet()) { LinkKey key = entry.getKey(); Timestamp peerTimestamp = entry.getValue(); ProviderId primaryProviderId = pickPrimaryProviderId(getLinkDescriptions(key)); if (primaryProviderId != null) { if (!getLinkDescription(key, primaryProviderId).isNewer(peerTimestamp)) { notifyDelegateIfNotNull(removeLinkInternal(key, peerTimestamp)); } } } } private class InternalLinkEventListener implements ClusterMessageHandler { @Override public void handle(ClusterMessage message) { log.info("Received link event from peer: {}", message.sender()); InternalLinkEvent event = (InternalLinkEvent) SERIALIZER.decode(message.payload()); ProviderId providerId = event.providerId(); Timestamped<LinkDescription> linkDescription = event.linkDescription(); notifyDelegateIfNotNull(createOrUpdateLinkInternal(providerId, linkDescription)); } } private class InternalLinkRemovedEventListener implements ClusterMessageHandler { @Override public void handle(ClusterMessage message) { log.info("Received link removed event from peer: {}", message.sender()); InternalLinkRemovedEvent event = (InternalLinkRemovedEvent) SERIALIZER.decode(message.payload()); LinkKey linkKey = event.linkKey(); Timestamp timestamp = event.timestamp(); notifyDelegateIfNotNull(removeLinkInternal(linkKey, timestamp)); } } private final class InternalLinkAntiEntropyAdvertisementListener implements ClusterMessageHandler { @Override public void handle(ClusterMessage message) { log.info("Received Link Anti-Entropy advertisement from peer: {}", message.sender()); LinkAntiEntropyAdvertisement advertisement = SERIALIZER.decode(message.payload()); handleAntiEntropyAdvertisement(advertisement); } } }
package de.ddb.pdc.metadata;

import org.junit.Before;
import org.junit.Test;
import org.springframework.web.client.RestTemplate;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

public class MetaFetcherImplTest {

    private MetaFetcherImpl fetcher;
    private RestTemplate rest;

    @Before
    public void setUp() {
        rest = mock(RestTemplate.class);
        fetcher = new MetaFetcherImpl(rest, "authkey");
    }

    @Test
    public void search() {
        SearchResultItem resultItem = mock(SearchResultItem.class);
        when(resultItem.getId()).thenReturn("abcde");
        when(resultItem.getTitle()).thenReturn("Titel");
        when(resultItem.getSubtitle()).thenReturn("Untertitel");
        when(resultItem.getThumbnail()).thenReturn("/thumbnail.jpg");
        ArrayList<SearchResultItem> resultItems = new ArrayList<>();
        resultItems.add(resultItem);
        SearchResults results = mock(SearchResults.class);
        when(results.getResultItems()).thenReturn(resultItems);
        String url = "https://api.deutsche-digitale-bibliothek.de/search?oauth_consumer_key=authkey&sort=RELEVANCE&rows=10&query=Titel";
        when(rest.getForObject(url, SearchResults.class)).thenReturn(results);
        DDBItem[] items = fetcher.searchForItems("Titel", 10);
        assertEquals(1, items.length);
        assertEquals("abcde", items[0].getId());
        assertEquals("Titel", items[0].getTitle());
        assertEquals("Untertitel", items[0].getSubtitle());
        assertEquals("https: items[0].getImageUrl());
    }

    @Test
    public void fetch() {
        EntitiesResultItem eri = mock(EntitiesResultItem.class);
        when(eri.getName()).thenReturn("Johann Wolfgang von Goethe");
        when(eri.getPlaceOfBirth()).thenReturn("Frankfurt am Main");
        when(eri.getYearOfBirth()).thenReturn(1749);
        when(eri.getYearOfDeath()).thenReturn(1832);
        EntitiesResult er = mock(EntitiesResult.class);
        when(er.getResultItem()).thenReturn(eri);
        RDFItem rdf = mock(RDFItem.class);
        String authorId = "http://d-nb.info/gnd/118540238";
        List<String> authorIds = new ArrayList<>();
        authorIds.add(authorId);
        when(rdf.getAuthorIds()).thenReturn(authorIds);
        when(rdf.getInstitution()).thenReturn("Deutsche Digitale Bibliothek");
        when(rdf.getPublishYear()).thenReturn(1946);
        EDMItem edm = mock(EDMItem.class);
        when(edm.getRdf()).thenReturn(rdf);
        ItemAipResult result = mock(ItemAipResult.class);
        when(result.getRDFItem()).thenReturn(rdf);
        String url = "https://api.deutsche-digitale-bibliothek.de/items/itemId/aip?oauth_consumer_key=authkey";
        String url2 = "https:
        when(rest.getForObject(url, ItemAipResult.class)).thenReturn(result);
        when(rest.getForObject(url2, EntitiesResult.class)).thenReturn(er);
        DDBItem ddbItem = fetcher.fetchMetadata("itemId");
        assertEquals("Deutsche Digitale Bibliothek", ddbItem.getInstitution());
        assertEquals(1946, ddbItem.getPublishedYear().get(Calendar.YEAR));
        assertEquals("http://d-nb.info/gnd/118540238", ddbItem.getAuthors().get(0).getDnbId());
        Author author = ddbItem.getAuthors().get(0);
        assertEquals("Johann Wolfgang von Goethe", author.getName());
        assertEquals(1749, author.getYearOfBirth().get(Calendar.YEAR));
        assertEquals(1832, author.getYearOfDeath().get(Calendar.YEAR));
        assertEquals("Frankfurt am Main", author.getPlaceOfBirth());
    }
}
package fr.wseduc.cas.test.data;

import java.util.HashMap;
import java.util.Map;

import fr.wseduc.cas.async.Handler;
import fr.wseduc.cas.data.DataHandler;
import fr.wseduc.cas.entities.AuthCas;
import fr.wseduc.cas.entities.User;
import fr.wseduc.cas.exceptions.AuthenticationException;
import fr.wseduc.cas.exceptions.Try;
import fr.wseduc.cas.http.Request;

public class TestDataHandler extends DataHandler {

    private static final Map<String, String> tickets = new HashMap<>();

    protected TestDataHandler(Request request) {
        super(request);
    }

    @Override
    public void validateService(AuthCas authCas, String service, Handler<Boolean> handler) {
    }

    @Override
    public void authenticateUser(String user, String password, AuthCas authCas,
            Handler<Try<AuthenticationException, AuthCas>> handler) {
    }

    @Override
    protected void getAuthByProxyGrantingTicket(String pgt, Handler<AuthCas> handler) {
    }

    @Override
    protected void getUser(AuthCas authCas, String service, Handler<User> handler) {
    }

    @Override
    protected void getAuth(String ticket, Handler<AuthCas> handler) {
    }

    @Override
    protected void getAuthByProxyTicket(String ticket, Handler<AuthCas> handler) {
    }

    @Override
    public void getOrCreateAuth(Request request, Handler<AuthCas> handler) {
    }

    @Override
    public void persistAuth(AuthCas authCas, Handler<Boolean> handler) {
    }

    @Override
    public void getAndDestroyAuth(Request request, Handler<AuthCas> handler) {
    }

    @Override
    public void getAndDestroyAuth(String user, Handler<AuthCas> handler) {
    }
}
package info.limpet.data; import info.limpet.ICollection; import info.limpet.ICommand; import info.limpet.data.impl.ObjectCollection; import info.limpet.data.impl.QuantityCollection; import info.limpet.data.impl.TemporalQuantityCollection; import info.limpet.data.operations.AddQuantityOperation; import info.limpet.data.operations.CollectionComplianceTests; import info.limpet.data.operations.MultiplyQuantityOperation; import info.limpet.data.store.InMemoryStore; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import javax.measure.Quantity; import javax.measure.Unit; import javax.measure.quantity.Dimensionless; import javax.measure.quantity.Length; import javax.measure.quantity.Speed; import junit.framework.TestCase; import tec.units.ri.quantity.Quantities; import tec.units.ri.unit.MetricPrefix; import tec.units.ri.unit.Units; public class TestOperations extends TestCase { public void testAppliesTo() { // the units for this measurement Unit<Speed> kmh = MetricPrefix.KILO(Units.METRE).divide(Units.HOUR) .asType(Speed.class); Unit<Speed> kmm = MetricPrefix.KILO(Units.METRE).divide(Units.MINUTE) .asType(Speed.class); Unit<Length> m = (Units.METRE).asType(Length.class); // the target collection QuantityCollection<Speed> speed_good_1 = new QuantityCollection<Speed>( "Speed 1", kmh); QuantityCollection<Speed> speed_good_2 = new QuantityCollection<Speed>( "Speed 2", kmh); QuantityCollection<Speed> speed_longer = new QuantityCollection<Speed>( "Speed 3", kmh); QuantityCollection<Speed> speed_diff_units = new QuantityCollection<Speed>( "Speed 4", kmm); QuantityCollection<Length> len1 = new QuantityCollection<Length>( "Length 1", m); TemporalQuantityCollection<Speed> temporal_speed_1 = new TemporalQuantityCollection<Speed>( "Speed 5", kmh); TemporalQuantityCollection<Speed> temporal_speed_2 = new TemporalQuantityCollection<Speed>( "Speed 6", kmh); ObjectCollection<String> string_1 = new ObjectCollection<>("strings 1"); ObjectCollection<String> string_2 = new ObjectCollection<>("strings 2"); for (int i = 1; i <= 10; i++) { // create a measurement double thisSpeed = i * 2; Quantity<Speed> speedVal1 = Quantities.getQuantity(thisSpeed, kmh); Quantity<Speed> speedVal2 = Quantities.getQuantity(thisSpeed*2, kmh); Quantity<Speed> speedVal3 = Quantities.getQuantity(thisSpeed/2, kmh); Quantity<Speed> speedVal4 = Quantities.getQuantity(thisSpeed/2, kmm); Quantity<Length> lenVal1 = Quantities.getQuantity(thisSpeed/2, m); // store the measurements speed_good_1.add( speedVal1); speed_good_2.add( speedVal2); speed_longer.add( speedVal3); speed_diff_units.add( speedVal4); temporal_speed_1.add(i, speedVal2); temporal_speed_2.add(i, speedVal3); len1.add(lenVal1); string_1.add(i + " "); string_2.add(i + "a "); } Quantity<Speed> speedVal3a = Quantities.getQuantity(2, kmh); speed_longer.add( speedVal3a); List<ICollection> selection = new ArrayList<ICollection>(3); CollectionComplianceTests testOp = new CollectionComplianceTests(); selection.clear(); selection.add(speed_good_1); selection.add(speed_good_2); assertTrue("all same dim", testOp.allEqualDimensions(selection)); assertTrue("all same units", testOp.allEqualUnits(selection)); assertTrue("all same length", testOp.allEqualLength(selection)); assertTrue("all quantities", testOp.allQuantity(selection)); assertFalse("all temporal", testOp.allTemporal(selection)); selection.clear(); selection.add(speed_good_1); selection.add(speed_good_2); selection.add(speed_diff_units); assertTrue("all same dim", 
testOp.allEqualDimensions(selection)); assertFalse("all same units", testOp.allEqualUnits(selection)); assertTrue("all same length", testOp.allEqualLength(selection)); assertTrue("all quantities", testOp.allQuantity(selection)); assertFalse("all temporal", testOp.allTemporal(selection)); selection.clear(); selection.add(speed_good_1); selection.add(speed_good_2); selection.add(len1); assertFalse("all same dim", testOp.allEqualDimensions(selection)); assertFalse("all same units", testOp.allEqualUnits(selection)); assertTrue("all same length", testOp.allEqualLength(selection)); assertTrue("all quantities", testOp.allQuantity(selection)); assertFalse("all temporal", testOp.allTemporal(selection)); selection.clear(); selection.add(speed_good_1); selection.add(speed_good_2); selection.add(speed_longer); assertTrue("all same dim", testOp.allEqualDimensions(selection)); assertTrue("all same units", testOp.allEqualUnits(selection)); assertFalse("all same length", testOp.allEqualLength(selection)); assertTrue("all quantities", testOp.allQuantity(selection)); assertFalse("all temporal", testOp.allTemporal(selection)); selection.clear(); selection.add(temporal_speed_1); selection.add(temporal_speed_2); assertTrue("all same dim", testOp.allEqualDimensions(selection)); assertTrue("all same units", testOp.allEqualUnits(selection)); assertTrue("all same length", testOp.allEqualLength(selection)); assertTrue("all quantities", testOp.allQuantity(selection)); assertTrue("all temporal", testOp.allTemporal(selection)); selection.clear(); selection.add(temporal_speed_1); selection.add(string_1); assertFalse("all same dim", testOp.allEqualDimensions(selection)); assertFalse("all same units", testOp.allEqualUnits(selection)); assertTrue("all same length", testOp.allEqualLength(selection)); assertFalse("all quantities", testOp.allQuantity(selection)); assertFalse("all temporal", testOp.allTemporal(selection)); selection.clear(); selection.add(string_1); selection.add(string_1); assertFalse("all same dim", testOp.allEqualDimensions(selection)); assertFalse("all same units", testOp.allEqualUnits(selection)); assertTrue("all same length", testOp.allEqualLength(selection)); assertTrue("all non quantities", testOp.allNonQuantity(selection)); assertFalse("all temporal", testOp.allTemporal(selection)); // ok, let's try one that works selection.clear(); selection.add(speed_good_1); selection.add(speed_good_2); InMemoryStore store = new InMemoryStore(); assertEquals("store empty", 0, store.size()); @SuppressWarnings({ "unchecked", "rawtypes" }) Collection<ICommand<ICollection>> actions = new AddQuantityOperation().actionsFor(selection, store ); assertEquals("correct number of actions returned", 1, actions.size()); ICommand<?> addAction = actions.iterator().next(); addAction.execute(); assertEquals("new collection added to store", 1, store.size()); ICollection firstItem = store.iterator().next(); ICommand<?> precedent = firstItem.getPrecedent(); assertNotNull("has precedent", precedent); assertEquals("Correct name", "Add series", precedent.getTitle()); List<? extends ICollection> inputs = precedent.getInputs(); assertEquals("Has both precedents", 2, inputs.size()); Iterator<? 
extends ICollection> iIter = inputs.iterator(); while (iIter.hasNext()) { ICollection thisC = (ICollection) iIter.next(); List<ICommand<?>> deps = thisC.getDependents(); assertEquals("has a depedent", 1, deps.size()); Iterator<ICommand<?>> dIter = deps.iterator(); while (dIter.hasNext()) { ICommand<?> iCommand = dIter.next(); assertEquals("Correct dependent", precedent, iCommand); } } List<? extends ICollection> outputs = precedent.getOutputs(); assertEquals("Has both dependents", 1, outputs.size()); Iterator<? extends ICollection> oIter = outputs.iterator(); while (oIter.hasNext()) { ICollection thisC = (ICollection) oIter.next(); ICommand<?> dep = thisC.getPrecedent(); assertNotNull("has a depedent", dep); assertEquals("Correct dependent", precedent, dep); } } public void testDimensionlessMultiply() { QuantityCollection<Dimensionless> factor = new QuantityCollection<>("Factor 4", Units.ONE); // the units for this measurement Unit<Speed> kmh = MetricPrefix.KILO(Units.METRE).divide(Units.HOUR) .asType(Speed.class); Unit<Speed> kmm = MetricPrefix.KILO(Units.METRE).divide(Units.MINUTE) .asType(Speed.class); Unit<Length> m = (Units.METRE).asType(Length.class); // the target collection QuantityCollection<Speed> speed_good_1 = new QuantityCollection<Speed>( "Speed 1", kmh); QuantityCollection<Speed> speed_good_2 = new QuantityCollection<Speed>( "Speed 2", kmh); QuantityCollection<Speed> speed_longer = new QuantityCollection<Speed>( "Speed 3", kmh); QuantityCollection<Speed> speed_diff_units = new QuantityCollection<Speed>( "Speed 4", kmm); QuantityCollection<Length> len1 = new QuantityCollection<Length>( "Length 1", m); TemporalQuantityCollection<Speed> temporal_speed_1 = new TemporalQuantityCollection<Speed>( "Speed 5", kmh); TemporalQuantityCollection<Speed> temporal_speed_2 = new TemporalQuantityCollection<Speed>( "Speed 6", kmh); ObjectCollection<String> string_1 = new ObjectCollection<>("strings 1"); for (int i = 1; i <= 10; i++) { // create a measurement double thisSpeed = i * 2; Quantity<Speed> speedVal1 = Quantities.getQuantity(thisSpeed, kmh); Quantity<Speed> speedVal2 = Quantities.getQuantity(thisSpeed*2, kmh); Quantity<Speed> speedVal3 = Quantities.getQuantity(thisSpeed/2, kmh); Quantity<Speed> speedVal4 = Quantities.getQuantity(thisSpeed/2, kmm); Quantity<Length> lenVal1 = Quantities.getQuantity(thisSpeed/2, m); // store the measurements speed_good_1.add( speedVal1); speed_good_2.add( speedVal2); speed_longer.add( speedVal3); speed_diff_units.add( speedVal4); temporal_speed_1.add(i, speedVal2); temporal_speed_2.add(i, speedVal3); len1.add(lenVal1); string_1.add(i + " "); } // give the singleton a value factor.add(4); // ok, let's try one that works List<ICollection> selection = new ArrayList<ICollection>(3); // place to store results data InMemoryStore store = new InMemoryStore(); // TEST INVALID PERMUTATIONS selection.clear(); selection.add(speed_good_1); selection.add(string_1); Collection<ICommand<ICollection>> commands = new MultiplyQuantityOperation().actionsFor(selection, store ); assertEquals("invalid collections - not both quantities", 0, commands.size()); selection.clear(); selection.add(speed_good_1); selection.add(len1); assertEquals("store empty", 0, store.size()); commands = new MultiplyQuantityOperation().actionsFor(selection, store ); assertEquals("valid collections - both quantities", 1, commands.size()); selection.clear(); selection.add(speed_good_1); selection.add(speed_good_2); store.clear(); assertEquals("store empty", 0, store.size()); commands = new 
MultiplyQuantityOperation().actionsFor(selection, store ); assertEquals("valid collections - both speeds", 1, commands.size()); // now test valid collections selection.clear(); selection.add(speed_good_1); selection.add(factor); assertEquals("store empty", 0, store.size()); commands = new MultiplyQuantityOperation().actionsFor(selection, store ); assertEquals("valid collections - one is singleton", 1, commands.size()); ICommand<ICollection> command = commands.iterator().next(); // test actions has single item: "Multiply series by constant" assertEquals("correct name", "Multiply Series", command.getTitle()); // apply action command.execute(); // test store has a new item in it assertEquals("store not empty", 1, store.size()); ICollection newS = store.get(MultiplyQuantityOperation.SERIES_NAME); // test results is same length as thisSpeed assertEquals("correct size", 10, newS.size()); selection.clear(); selection.add(speed_good_1); selection.add(factor); store.clear(); assertEquals("store empty", 0, store.size()); commands = new MultiplyQuantityOperation().actionsFor(selection, store ); assertEquals("valid collections - one is singleton", 1, commands.size()); // TODO: run operation, check the new series is in the store // TODO: check the new series is of the correct length selection.clear(); selection.add(speed_good_1); selection.add(speed_diff_units); store.clear(); assertEquals("store empty", 0, store.size()); commands = new MultiplyQuantityOperation().actionsFor(selection, store ); // TODO: we should get a series returned - they're both speeds // TODO: run operation, check the new series is in the store // TODO: check the new series is of the correct length // TODO: check the new series is of the correct units, and that the first item has been converted appropriately } }
package org.junit.tests.assertion; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.*; import java.io.IOException; import java.math.BigDecimal; import java.util.Comparator; import org.junit.Assert; import org.junit.Assert.ThrowingRunnable; import org.junit.ComparisonFailure; import org.junit.Test; import org.junit.internal.ArrayComparisonFailure; /** * Tests for {@link org.junit.Assert} */ public class AssertionTest { // If you want to use 1.4 assertions, they will be reported correctly. // However, you need to add the -ea VM argument when running. // @Test (expected=AssertionError.class) public void error() { // assert false; @Test(expected = AssertionError.class) public void fails() { Assert.fail(); } @Test public void failWithNoMessageToString() { try { Assert.fail(); } catch (AssertionError exception) { assertEquals("java.lang.AssertionError", exception.toString()); } } @Test public void failWithMessageToString() { try { Assert.fail("woops!"); } catch (AssertionError exception) { assertEquals("java.lang.AssertionError: woops!", exception.toString()); } } @Test(expected = AssertionError.class) public void arraysNotEqual() { assertArrayEquals((new Object[] { new Object() }), (new Object[] { new Object() })); } @Test(expected = AssertionError.class) public void arraysNotEqualWithMessage() { assertArrayEquals("not equal", (new Object[] { new Object() }), (new Object[] { new Object() })); } @Test public void arraysExpectedNullMessage() { try { assertArrayEquals("not equal", null, (new Object[] { new Object() })); } catch (AssertionError exception) { assertEquals("not equal: expected array was null", exception.getMessage()); } } @Test public void arraysActualNullMessage() { try { assertArrayEquals("not equal", (new Object[] { new Object() }), null); } catch (AssertionError exception) { assertEquals("not equal: actual array was null", exception.getMessage()); } } @Test public void arraysDifferentLengthMessage() { try { assertArrayEquals("not equal", (new Object[0]), (new Object[1])); } catch (AssertionError exception) { assertEquals( "not equal: array lengths differed, expected.length=0 actual.length=1", exception.getMessage()); } } @Test(expected = ArrayComparisonFailure.class) public void arraysElementsDiffer() { assertArrayEquals("not equal", (new Object[] { "this is a very long string in the middle of an array" }), (new Object[] { "this is another very long string in the middle of an array" })); } @Test public void arraysDifferAtElement0nullMessage() { try { assertArrayEquals((new Object[] { true }), (new Object[] { false })); } catch (AssertionError exception) { assertEquals( "arrays first differed at element [0]; expected:<true> but was:<false>", exception.getMessage()); } } @Test public void arraysDifferAtElement1nullMessage() { try { assertArrayEquals((new Object[] { true, true }), (new Object[] { true, false })); } catch (AssertionError exception) { assertEquals( "arrays first differed at element [1]; expected:<true> but was:<false>", exception.getMessage()); } } @Test public void arraysDifferAtElement0withMessage() { try { assertArrayEquals("message", (new Object[] { true }), (new Object[] { false })); } catch (AssertionError exception) { assertEquals( "message: arrays first differed at element [0]; expected:<true> but was:<false>", exception.getMessage()); } } @Test public void arraysDifferAtElement1withMessage() { try { assertArrayEquals("message", (new 
Object[] { true, true }), (new Object[] { true, false })); fail(); } catch (AssertionError exception) { assertEquals( "message: arrays first differed at element [1]; expected:<true> but was:<false>", exception.getMessage()); } } @Test public void multiDimensionalArraysAreEqual() { assertArrayEquals((new Object[][] { { true, true }, { false, false } }), (new Object[][] { { true, true }, { false, false } })); } @Test public void multiDimensionalIntArraysAreEqual() { int[][] int1 = { { 1, 2, 3 }, { 4, 5, 6 } }; int[][] int2 = { { 1, 2, 3 }, { 4, 5, 6 } }; assertArrayEquals(int1, int2); } @Test public void oneDimensionalPrimitiveArraysAreEqual() { assertArrayEquals(new boolean[] { true }, new boolean[] { true }); assertArrayEquals(new byte[] { 1 }, new byte[] { 1 }); assertArrayEquals(new char[] { 1 }, new char[] { 1 }); assertArrayEquals(new short[] { 1 }, new short[] { 1 }); assertArrayEquals(new int[] { 1 }, new int[] { 1 }); assertArrayEquals(new long[] { 1 }, new long[] { 1 }); assertArrayEquals(new double[] { 1.0 }, new double[] { 1.0 }, 1.0); assertArrayEquals(new float[] { 1.0f }, new float[] { 1.0f }, 1.0f); } @Test(expected = AssertionError.class) public void oneDimensionalDoubleArraysAreNotEqual() { assertArrayEquals(new double[] { 1.0 }, new double[] { 2.5 }, 1.0); } @Test(expected = AssertionError.class) public void oneDimensionalFloatArraysAreNotEqual() { assertArrayEquals(new float[] { 1.0f }, new float[] { 2.5f }, 1.0f); } @Test(expected = AssertionError.class) public void oneDimensionalBooleanArraysAreNotEqual() { assertArrayEquals(new boolean[] { true }, new boolean[] { false }); } @Test(expected = AssertionError.class) public void IntegerDoesNotEqualLong() { assertEquals(new Integer(1), new Long(1)); } @Test public void intsEqualLongs() { assertEquals(1, 1L); } @Test public void multiDimensionalArraysDeclaredAsOneDimensionalAreEqual() { assertArrayEquals( (new Object[] { new Object[] { true, true }, new Object[] { false, false } }), (new Object[] { new Object[] { true, true }, new Object[] { false, false } })); } @Test public void multiDimensionalArraysAreNotEqual() { try { assertArrayEquals("message", (new Object[][] { { true, true }, { false, false } }), (new Object[][] { { true, true }, { true, false } })); fail(); } catch (AssertionError exception) { assertEquals( "message: arrays first differed at element [1][0]; expected:<false> but was:<true>", exception.getMessage()); } } @Test public void multiDimensionalArraysAreNotEqualNoMessage() { try { assertArrayEquals( (new Object[][] { { true, true }, { false, false } }), (new Object[][] { { true, true }, { true, false } })); fail(); } catch (AssertionError exception) { assertEquals( "arrays first differed at element [1][0]; expected:<false> but was:<true>", exception.getMessage()); } } @Test public void multiDimensionalArraysDifferentLengthMessage() { try { assertArrayEquals("message", new Object[][] { { true, true }, { false, false } }, new Object[][] { { true, true }, { false } }); } catch (AssertionError exception) { assertEquals( "message: arrays first differed at element [1]; array lengths differed, expected.length=2 actual.length=1", exception.getMessage()); return; } fail("Expected AssertionError to be thrown"); } @Test public void multiDimensionalArraysDifferentLengthNoMessage() { try { assertArrayEquals( new Object[][] { { true, true }, { false, false } }, new Object[][] { { true, true }, { false } }); } catch (AssertionError exception) { assertEquals( "arrays first differed at element [1]; array lengths differed, 
expected.length=2 actual.length=1", exception.getMessage()); return; } fail("Expected AssertionError to be thrown"); } @Test public void arraysWithNullElementEqual() { Object[] objects1 = new Object[] { null }; Object[] objects2 = new Object[] { null }; assertArrayEquals(objects1, objects2); } @Test public void stringsDifferWithUserMessage() { try { assertEquals("not equal", "one", "two"); } catch (Throwable exception) { assertEquals("not equal expected:<[one]> but was:<[two]>", exception.getMessage()); } } @Test public void arraysEqual() { Object element = new Object(); Object[] objects1 = new Object[] { element }; Object[] objects2 = new Object[] { element }; assertArrayEquals(objects1, objects2); } @Test public void arraysEqualWithMessage() { Object element = new Object(); Object[] objects1 = new Object[] { element }; Object[] objects2 = new Object[] { element }; assertArrayEquals("equal", objects1, objects2); } @Test public void equals() { Object o = new Object(); assertEquals(o, o); assertEquals("abc", "abc"); assertEquals(true, true); assertEquals((byte) 1, (byte) 1); assertEquals('a', 'a'); assertEquals((short) 1, (short) 1); assertEquals(1, 1); // int by default, cast is unnecessary assertEquals(1l, 1l); assertEquals(1.0, 1.0, 0.0); assertEquals(1.0d, 1.0d, 0.0d); } @Test public <T> void greaterThan() { assertGreaterThan("a", "b", new Comparator<String>() { public int compare(String a, String b) { return b.compareTo(a); } }); } @Test(expected = AssertionError.class) public void notEqualsObjectWithNull() { assertEquals(new Object(), null); } @Test(expected = AssertionError.class) public void notEqualsNullWithObject() { assertEquals(null, new Object()); } @Test public void notEqualsObjectWithNullWithMessage() { Object o = new Object(); try { assertEquals("message", null, o); fail(); } catch (AssertionError e) { assertEquals( "message expected:<null> but was:<" + o.toString() + ">", e.getMessage()); } } @Test public void notEqualsNullWithObjectWithMessage() { Object o = new Object(); try { assertEquals("message", o, null); fail(); } catch (AssertionError e) { assertEquals( "message expected:<" + o.toString() + "> but was:<null>", e.getMessage()); } } @Test(expected = AssertionError.class) public void objectsNotEquals() { assertEquals(new Object(), new Object()); } @Test(expected = ComparisonFailure.class) public void stringsNotEqual() { assertEquals("abc", "def"); } @Test(expected = AssertionError.class) public void booleansNotEqual() { assertEquals(true, false); } @Test(expected = AssertionError.class) public void bytesNotEqual() { assertEquals((byte) 1, (byte) 2); } @Test(expected = AssertionError.class) public void charsNotEqual() { assertEquals('a', 'b'); } @Test(expected = AssertionError.class) public void shortsNotEqual() { assertEquals((short) 1, (short) 2); } @Test(expected = AssertionError.class) public void intsNotEqual() { assertEquals(1, 2); } @Test(expected = AssertionError.class) public void longsNotEqual() { assertEquals(1l, 2l); } @Test(expected = AssertionError.class) public void floatsNotEqual() { assertEquals(1.0, 2.0, 0.9); } @SuppressWarnings("deprecation") @Test(expected = AssertionError.class) public void floatsNotEqualWithoutDelta() { assertEquals(1.0, 1.1); } @Test public void floatsNotDoublesInArrays() { float delta = 4.444f; float[] f1 = new float[] { 1.111f }; float[] f2 = new float[] { 5.555f }; Assert.assertArrayEquals(f1, f2, delta); } @Test(expected = AssertionError.class) public void bigDecimalsNotEqual() { assertEquals(new BigDecimal("123.4"), new 
BigDecimal("123.0")); } @Test(expected = AssertionError.class) public void doublesNotEqual() { assertEquals(1.0d, 2.0d, 0.9d); } @Test public void naNsAreEqual() { assertEquals(Float.NaN, Float.NaN, Float.POSITIVE_INFINITY); assertEquals(Double.NaN, Double.NaN, Double.POSITIVE_INFINITY); } @SuppressWarnings("unused") @Test public void nullNullmessage() { try { assertNull("junit"); fail(); } catch (AssertionError e) { assertEquals("expected null, but was:<junit>", e.getMessage()); } } @SuppressWarnings("unused") @Test public void nullWithMessage() { try { assertNull("message", "hello"); fail(); } catch (AssertionError exception) { assertEquals("message expected null, but was:<hello>", exception.getMessage()); } } @Test public void same() { Object o1 = new Object(); assertSame(o1, o1); } @Test public void notSame() { Object o1 = new Object(); Object o2 = new Object(); assertNotSame(o1, o2); } @Test(expected = AssertionError.class) public void objectsNotSame() { assertSame(new Object(), new Object()); } @Test(expected = AssertionError.class) public void objectsAreSame() { Object o = new Object(); assertNotSame(o, o); } @Test public void sameWithMessage() { try { assertSame("not same", "hello", "good-bye"); fail(); } catch (AssertionError exception) { assertEquals("not same expected same:<hello> was not:<good-bye>", exception.getMessage()); } } @Test public void sameNullMessage() { try { assertSame("hello", "good-bye"); fail(); } catch (AssertionError exception) { assertEquals("expected same:<hello> was not:<good-bye>", exception.getMessage()); } } @Test public void notSameWithMessage() { Object o = new Object(); try { assertNotSame("message", o, o); fail(); } catch (AssertionError exception) { assertEquals("message expected not same", exception.getMessage()); } } @Test public void notSameNullMessage() { Object o = new Object(); try { assertNotSame(o, o); fail(); } catch (AssertionError exception) { assertEquals("expected not same", exception.getMessage()); } } @Test public void nullMessage() { try { fail(null); } catch (AssertionError exception) { // we used to expect getMessage() to return ""; see // failWithNoMessageToString() assertNull(exception.getMessage()); } } @Test public void nullMessageDisappearsWithStringAssertEquals() { try { assertEquals(null, "a", "b"); fail(); } catch (ComparisonFailure e) { assertEquals("expected:<[a]> but was:<[b]>", e.getMessage()); } } @Test public void nullMessageDisappearsWithAssertEquals() { try { assertEquals(null, 1, 2); fail(); } catch (AssertionError e) { assertEquals("expected:<1> but was:<2>", e.getMessage()); } } @Test(expected = AssertionError.class) public void arraysDeclaredAsObjectAreComparedAsObjects() { Object a1 = new Object[] { "abc" }; Object a2 = new Object[] { "abc" }; assertEquals(a1, a2); } @Test public void implicitTypecastEquality() { byte b = 1; short s = 1; int i = 1; long l = 1L; float f = 1.0f; double d = 1.0; assertEquals(b, s); assertEquals(b, i); assertEquals(b, l); assertEquals(s, i); assertEquals(s, l); assertEquals(i, l); assertEquals(f, d, 0); } @Test public void errorMessageDistinguishesDifferentValuesWithSameToString() { try { assertEquals("4", new Integer(4)); } catch (AssertionError e) { assertEquals( "expected: java.lang.String<4> but was: java.lang.Integer<4>", e.getMessage()); } } @Test public void assertThatIncludesDescriptionOfTestedValueInErrorMessage() { String expected = "expected"; String actual = "actual"; String expectedMessage = "identifier\nExpected: \"expected\"\n but: was \"actual\""; try { 
assertThat("identifier", actual, equalTo(expected)); } catch (AssertionError e) { assertEquals(expectedMessage, e.getMessage()); } } @Test public void assertThatIncludesAdvancedMismatch() { String expectedMessage = "identifier\nExpected: is an instance of java.lang.Integer\n but: \"actual\" is a java.lang.String"; try { assertThat("identifier", "actual", is(instanceOf(Integer.class))); } catch (AssertionError e) { assertEquals(expectedMessage, e.getMessage()); } } @Test public void assertThatDescriptionCanBeElided() { String expected = "expected"; String actual = "actual"; String expectedMessage = "\nExpected: \"expected\"\n but: was \"actual\""; try { assertThat(actual, equalTo(expected)); } catch (AssertionError e) { assertEquals(expectedMessage, e.getMessage()); } } @Test public void nullAndStringNullPrintCorrectError() { try { assertEquals(null, "null"); } catch (AssertionError e) { assertEquals("expected: null<null> but was: java.lang.String<null>", e.getMessage()); } } @Test(expected = AssertionError.class) public void stringNullAndNullWorksToo() { assertEquals("null", null); } @Test(expected = AssertionError.class) public void compareBigDecimalAndInteger() { final BigDecimal bigDecimal = new BigDecimal("1.2"); final Integer integer = Integer.valueOf("1"); assertEquals(bigDecimal, integer); } @Test(expected = AssertionError.class) public void sameObjectIsNotEqual() { Object o = new Object(); assertNotEquals(o, o); } @Test public void objectsWithDiferentReferencesAreNotEqual() { assertNotEquals(new Object(), new Object()); } @Test public void assertNotEqualsIncludesCorrectMessage() { Integer value1 = new Integer(1); Integer value2 = new Integer(1); String message = "The values should be different"; try { assertNotEquals(message, value1, value2); } catch (AssertionError e) { assertEquals(message + ". 
Actual: " + value1, e.getMessage()); return; } fail("Failed on assertion."); } @Test public void assertNotEqualsIncludesTheValueBeingTested() { Integer value1 = new Integer(1); Integer value2 = new Integer(1); try { assertNotEquals(value1, value2); } catch (AssertionError e) { assertTrue(e.getMessage().contains(value1.toString())); return; } fail("Failed on assertion."); } @Test public void assertNotEqualsWorksWithPrimitiveTypes() { assertNotEquals(1L, 2L); assertNotEquals("The values should be different", 1L, 2L); assertNotEquals(1.0, 2.0, 0); assertNotEquals("The values should be different", 1.0, 2.0, 0); assertNotEquals(1.0f, 2.0f, 0f); assertNotEquals("The values should be different", 1.0f, 2.0f, 0f); } @Test(expected = AssertionError.class) public void assertNotEqualsConsidersDeltaCorrectly() { assertNotEquals(1.0, 0.9, 0.1); } @Test(expected = AssertionError.class) public void assertNotEqualsConsidersFloatDeltaCorrectly() { assertNotEquals(1.0f, 0.75f, 0.25f); } @Test(expected = AssertionError.class) public void assertNotEqualsIgnoresDeltaOnNaN() { assertNotEquals(Double.NaN, Double.NaN, 1); } @Test(expected = AssertionError.class) public void assertNotEqualsIgnoresFloatDeltaOnNaN() { assertNotEquals(Float.NaN, Float.NaN, 1f); } @Test(expected = AssertionError.class) public void expectThrowsRequiresAnExceptionToBeThrown() { expectThrows(Throwable.class, nonThrowingRunnable()); } @Test public void expectThrowsIncludesAnInformativeDefaultMessage() { try { expectThrows(Throwable.class, nonThrowingRunnable()); } catch (AssertionError ex) { assertEquals( "expected Throwable to be thrown, but nothing was thrown", ex.getMessage()); return; } fail(); } @Test public void expectThrowsReturnsTheSameObjectThrown() { NullPointerException npe = new NullPointerException(); Throwable throwable = expectThrows(Throwable.class, throwingRunnable(npe)); assertSame(npe, throwable); } @Test(expected = AssertionError.class) public void expectThrowsDetectsTypeMismatchesViaExplicitTypeHint() { NullPointerException npe = new NullPointerException(); expectThrows(IOException.class, throwingRunnable(npe)); } @Test public void expectThrowsWrapsAndPropagatesUnexpectedExceptions() { NullPointerException npe = new NullPointerException("inner-message"); try { expectThrows(IOException.class, throwingRunnable(npe)); } catch (AssertionError ex) { assertSame(npe, ex.getCause()); assertEquals("inner-message", ex.getCause().getMessage()); return; } fail(); } @Test public void expectThrowsSuppliesACoherentErrorMessageUponTypeMismatch() { NullPointerException npe = new NullPointerException(); try { expectThrows(IOException.class, throwingRunnable(npe)); } catch (AssertionError error) { assertEquals( "unexpected exception type thrown; expected:<IOException> but was:<NullPointerException>", error.getMessage()); assertSame(npe, error.getCause()); return; } fail(); } private static ThrowingRunnable nonThrowingRunnable() { return new ThrowingRunnable() { public void run() throws Throwable { } }; } private static ThrowingRunnable throwingRunnable(final Throwable t) { return new ThrowingRunnable() { public void run() throws Throwable { throw t; } }; } }
package battlecode.world; import battlecode.common.*; import static battlecode.common.GameActionExceptionType.*; import battlecode.instrumenter.RobotDeathException; import battlecode.schema.Action; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * The actual implementation of RobotController. Its methods *must* be called * from a player thread. * * It is theoretically possible to have multiple for a single InternalRobot, but * that may cause problems in practice, and anyway why would you want to? * * All overriden methods should assertNotNull() all of their (Object) arguments, * if those objects are not explicitly stated to be nullable. */ public final strictfp class RobotControllerImpl implements RobotController { /** * The world the robot controlled by this controller inhabits. */ private final GameWorld gameWorld; /** * The robot this controller controls. */ private final InternalRobot robot; /** * Create a new RobotControllerImpl * * @param gameWorld the relevant world * @param robot the relevant robot */ public RobotControllerImpl(GameWorld gameWorld, InternalRobot robot) { this.gameWorld = gameWorld; this.robot = robot; } /** * @return the robot this controller is connected to */ public InternalRobot getRobot() { return robot; } /** * Throw a null pointer exception if an object is null. * * @param o the object to test */ private static void assertNotNull(Object o) { if (o == null) { throw new NullPointerException("Argument has an invalid null value"); } } @Override public int hashCode() { return robot.getID(); } @Override public int getRoundLimit(){ return gameWorld.getGameMap().getRounds(); } @Override public int getRoundNum(){ return gameWorld.getCurrentRound(); } @Override public float getTeamBullets(){ return gameWorld.getTeamInfo().getBulletSupply(getTeam()); } @Override public int getTeamVictoryPoints(){ return gameWorld.getTeamInfo().getVictoryPoints(getTeam()); } @Override public int getRobotCount(){ return gameWorld.getObjectInfo().getRobotCount(getTeam()); } @Override public int getTreeCount(){ return gameWorld.getObjectInfo().getTreeCount(getTeam()); } @Override public MapLocation[] getInitialArchonLocations(Team t){ assertNotNull(t); if (t == Team.NEUTRAL) { return new MapLocation[0]; } else { BodyInfo[] initialRobots = gameWorld.getGameMap().getInitialBodies(); ArrayList<MapLocation> archonLocs = new ArrayList<>(); for (BodyInfo initial : initialRobots) { if(initial.isRobot()){ RobotInfo robot = (RobotInfo) initial; if (robot.type == RobotType.ARCHON && robot.team == t) { archonLocs.add(robot.getLocation()); } } } MapLocation[] array = archonLocs.toArray(new MapLocation[archonLocs.size()]); Arrays.sort(array); return array; } } @Override public int getID(){ return this.robot.getID(); } @Override public Team getTeam(){ return this.robot.getTeam(); } @Override public RobotType getType(){ return this.robot.getType(); } @Override public MapLocation getLocation(){ return this.robot.getLocation(); } @Override public float getHealth(){ return this.robot.getHealth(); } @Override public int getAttackCount(){ return this.robot.getAttackCount(); } @Override public int getMoveCount(){ return this.robot.getMoveCount(); } private void assertCanSenseLocation(MapLocation loc) throws GameActionException{ if(!canSenseLocation(loc)){ throw new GameActionException(CANT_SENSE_THAT, "Target location not within sensor range"); } } private void assertCanSensePartOfCircle(MapLocation center, float radius) throws GameActionException{ 
if(!canSensePartOfCircle(center, radius)){ throw new GameActionException(CANT_SENSE_THAT, "Target circle not within sensor range"); } } private void assertCanSenseAllOfCircle(MapLocation center, float radius) throws GameActionException{ if(!canSenseAllOfCircle(center, radius)){ throw new GameActionException(CANT_SENSE_THAT, "Target circle not completely within sensor range"); } } @Override public boolean onTheMap(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanSenseLocation(loc); return gameWorld.getGameMap().onTheMap(loc); } @Override public boolean onTheMap(MapLocation center, float radius) throws GameActionException{ assertNotNull(center); assertCanSenseAllOfCircle(center, radius); return gameWorld.getGameMap().onTheMap(center, radius); } @Override public boolean canSenseLocation(MapLocation loc) { assertNotNull(loc); return this.robot.canSenseLocation(loc); } @Override public boolean canSensePartOfCircle(MapLocation center, float radius){ assertNotNull(center); MapLocation closestPointOnCircle = center.add(center.directionTo(getLocation()), radius); return canSenseLocation(closestPointOnCircle); } @Override public boolean canSenseAllOfCircle(MapLocation center, float radius){ assertNotNull(center); MapLocation furthestPointOnCircle = center.add(center.directionTo(getLocation()).opposite(), radius); return canSenseLocation(furthestPointOnCircle); } @Override public boolean isLocationOccupied(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanSenseLocation(loc); return !gameWorld.getObjectInfo().isEmpty(loc, 0); } @Override public boolean isLocationOccupiedByTree(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanSenseLocation(loc); return gameWorld.getObjectInfo().getTreeAtLocation(loc) != null; } @Override public boolean isLocationOccupiedByRobot(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanSenseLocation(loc); return gameWorld.getObjectInfo().getRobotAtLocation(loc) != null; } @Override public boolean isCircleOccupied(MapLocation center, float radius) throws GameActionException{ assertNotNull(center); assertCanSenseAllOfCircle(center, radius); return !gameWorld.getObjectInfo().isEmpty(center, radius); } @Override public boolean isCircleOccupiedExceptByThisRobot(MapLocation center, float radius) throws GameActionException{ assertNotNull(center); assertCanSenseAllOfCircle(center, radius); return !gameWorld.getObjectInfo().isEmptyExceptForRobot(center, radius, robot); } @Override public TreeInfo senseTreeAtLocation(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanSenseLocation(loc); InternalTree tree = gameWorld.getObjectInfo().getTreeAtLocation(loc); if(tree != null) { return tree.getTreeInfo(); } return null; } @Override public RobotInfo senseRobotAtLocation(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanSenseLocation(loc); InternalRobot bot = gameWorld.getObjectInfo().getRobotAtLocation(loc); if(bot != null) { return bot.getRobotInfo(); } return null; } @Override public boolean canSenseTree(int id) { if(!gameWorld.getObjectInfo().existsTree(id)){ return false; } InternalTree tree = gameWorld.getObjectInfo().getTreeByID(id); return canSensePartOfCircle(tree.getLocation(), tree.getRadius()); } @Override public boolean canSenseRobot(int id) { if(!gameWorld.getObjectInfo().existsRobot(id)){ return false; } InternalRobot robot = gameWorld.getObjectInfo().getRobotByID(id); return canSensePartOfCircle(robot.getLocation(), 
robot.getType().bodyRadius); } @Override public boolean canSenseBullet(int id) { return gameWorld.getObjectInfo().existsBullet(id) && canSenseLocation(gameWorld.getObjectInfo().getBulletByID(id).getLocation()); } @Override public TreeInfo senseTree(int id) throws GameActionException { if(!canSenseTree(id)){ throw new GameActionException(CANT_SENSE_THAT, "Can't sense given tree; It may not exist anymore"); } return gameWorld.getObjectInfo().getTreeByID(id).getTreeInfo(); } @Override public RobotInfo senseRobot(int id) throws GameActionException { if(!canSenseRobot(id)){ throw new GameActionException(CANT_SENSE_THAT, "Can't sense given robot; It may not exist anymore"); } return gameWorld.getObjectInfo().getRobotByID(id).getRobotInfo(); } @Override public BulletInfo senseBullet(int id) throws GameActionException { if(!canSenseBullet(id)){ throw new GameActionException(CANT_SENSE_THAT, "Can't sense given bullet; It may not exist anymore"); } return gameWorld.getObjectInfo().getBulletByID(id).getBulletInfo(); } @Override public RobotInfo[] senseNearbyRobots() { return senseNearbyRobots(-1); } @Override public RobotInfo[] senseNearbyRobots(float radius) { return senseNearbyRobots(radius, null); } @Override public RobotInfo[] senseNearbyRobots(float radius, Team team) { return senseNearbyRobots(getLocation(), radius, team); } @Override public RobotInfo[] senseNearbyRobots(MapLocation center, float radius, Team team) { assertNotNull(center); InternalRobot[] allSensedRobots = gameWorld.getObjectInfo().getAllRobotsWithinRadius(center, radius == -1 ? getType().sensorRadius : radius); List<RobotInfo> validSensedRobots = new ArrayList<>(); for(InternalRobot sensedRobot : allSensedRobots){ // check if this robot if(sensedRobot.equals(this.robot)){ continue; } // check if can sense if(!canSensePartOfCircle(sensedRobot.getLocation(), sensedRobot.getType().bodyRadius)){ continue; } // check if right team if(team != null && sensedRobot.getTeam() != team){ continue; } validSensedRobots.add(sensedRobot.getRobotInfo()); } return validSensedRobots.toArray(new RobotInfo[validSensedRobots.size()]); } @Override public TreeInfo[] senseNearbyTrees() { return senseNearbyTrees(-1); } @Override public TreeInfo[] senseNearbyTrees(float radius) { return senseNearbyTrees(radius, null); } @Override public TreeInfo[] senseNearbyTrees(float radius, Team team) { return senseNearbyTrees(getLocation(), radius, team); } @Override public TreeInfo[] senseNearbyTrees(MapLocation center, float radius, Team team) { assertNotNull(center); InternalTree[] allSensedTrees = gameWorld.getObjectInfo().getAllTreesWithinRadius(center, radius == -1 ? getType().sensorRadius : radius); List<TreeInfo> validSensedTrees = new ArrayList<>(); for(InternalTree sensedTree : allSensedTrees){ // check if can sense if(!canSensePartOfCircle(sensedTree.getLocation(), sensedTree.getRadius())){ continue; } // check if right team if(team != null && sensedTree.getTeam() != team){ continue; } validSensedTrees.add(sensedTree.getTreeInfo()); } return validSensedTrees.toArray(new TreeInfo[validSensedTrees.size()]); } @Override public BulletInfo[] senseNearbyBullets() { return senseNearbyBullets(-1); } @Override public BulletInfo[] senseNearbyBullets(float radius) { return senseNearbyBullets(getLocation(), radius); } @Override public BulletInfo[] senseNearbyBullets(MapLocation center, float radius) { assertNotNull(center); InternalBullet[] allSensedBullets = gameWorld.getObjectInfo().getAllBulletsWithinRadius(center, radius == -1 ? 
getType().sensorRadius : radius); List<BulletInfo> validSensedBullets = new ArrayList<>(); for(InternalBullet sensedBullet : allSensedBullets){ // check if can sense if(!canSenseLocation(sensedBullet.getLocation())){ continue; } validSensedBullets.add(sensedBullet.getBulletInfo()); } return validSensedBullets.toArray(new BulletInfo[validSensedBullets.size()]); } @Override public MapLocation[] senseBroadcastingRobotLocations() { List<MapLocation> validLocs = new ArrayList<>(); for(RobotInfo robot : gameWorld.getPreviousBroadcasters()){ validLocs.add(robot.location); } return validLocs.toArray(new MapLocation[validLocs.size()]); } private void assertMoveReady() throws GameActionException{ if(hasMoved()){ throw new GameActionException(NOT_ACTIVE, "This robot has already moved this turn."); } } private void assertIsWeaponReady() throws GameActionException{ if(hasAttacked()){ throw new GameActionException(NOT_ACTIVE, "This robot has already attacked this turn."); } } private void assertIsBuildReady() throws GameActionException{ if(!isBuildReady()){ throw new GameActionException(NOT_ACTIVE, "This robot's build cooldown has not expired."); } } @Override public boolean hasMoved() { return getMoveCount() > 0; } @Override public boolean hasAttacked() { return getAttackCount() > 0; } @Override public boolean isBuildReady() { return this.robot.getBuildCooldownTurns() == 0; } /* private void assertIsPathable(MapLocation loc) throws GameActionException{ if(!onTheMap(loc, getType().bodyRadius) || isCircleOccupiedExceptByThisRobot(loc, getType().bodyRadius)){ throw new GameActionException(CANT_MOVE_THERE, "Cannot move to target location " + loc + "."); } }*/ private void assertCanMove(MapLocation loc) throws GameActionException{ if(!canMove(loc)) throw new GameActionException(CANT_MOVE_THERE, "Cannot move to the target location " + loc +"."); } @Override public boolean canMove(Direction dir) { return canMove(dir, getType().strideRadius); } @Override public boolean canMove(Direction dir, float dist) { assertNotNull(dir); dist = Math.max(0, Math.min(dist, getType().strideRadius)); MapLocation center = getLocation().add(dir, dist); return canMove(center); } @Override public boolean canMove(MapLocation center) { assertNotNull(center); float dist = getLocation().distanceTo(center); if(dist > getType().strideRadius) { Direction dir = getLocation().directionTo(center); center = getLocation().add(dir, getType().strideRadius); } boolean newLocationIsEmpty; if(getType() != RobotType.TANK && getType() != RobotType.SCOUT) { newLocationIsEmpty = gameWorld.getObjectInfo().isEmptyExceptForRobot(center, getType().bodyRadius, robot); } else { // Tanks have special condition due to body attack, Scouts can just go over trees newLocationIsEmpty = gameWorld.getObjectInfo().noRobotsExceptForRobot(center, getType().bodyRadius, robot); } return gameWorld.getGameMap().onTheMap(center, getType().bodyRadius) && newLocationIsEmpty; } @Override public void move(Direction dir) throws GameActionException { move(dir, getType().strideRadius); } @Override public void move(Direction dir, float dist) throws GameActionException { assertNotNull(dir); assertMoveReady(); dist = Math.max(0, Math.min(dist, getType().strideRadius)); MapLocation center = getLocation().add(dir, dist); move(center); } @Override public void move(MapLocation center) throws GameActionException { assertNotNull(center); assertMoveReady(); float dist = getLocation().distanceTo(center); if(dist > getType().strideRadius) { Direction dir = getLocation().directionTo(center); 
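// The requested destination is farther than this robot's stride: clamp it to strideRadius along that direction.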
center = getLocation().add(dir, getType().strideRadius); } assertCanMove(center); this.robot.incrementMoveCount(); if(getType() == RobotType.TANK) { // If Tank, see if can actually move, as opposed to just body attack InternalTree[] trees = gameWorld.getObjectInfo().getAllTreesWithinRadius(center, RobotType.TANK.bodyRadius); if(trees.length > 0) { // Body attack will happen // Find closest Tree InternalTree closestTree = null; float closestDist = Float.MAX_VALUE; for(InternalTree tree : trees) { float treeDist = tree.getLocation().distanceTo(robot.getLocation()); if(treeDist < closestDist) { closestDist = treeDist; closestTree = tree; } } // Damage the closest tree closestTree.damageTree(GameConstants.TANK_BODY_DAMAGE, getTeam(), false); // Now that damage has been done, refresh list of trees to see if it is still there trees = gameWorld.getObjectInfo().getAllTreesWithinRadius(center, RobotType.TANK.bodyRadius); if(trees.length > 0) // If something still obstructs the movement, don't actually move return; } } this.robot.setLocation(center); gameWorld.getMatchMaker().addMoved(getID(), getLocation()); } // TODO: Make this a player method private boolean haveBulletCosts(float cost){ return gameWorld.getTeamInfo().getBulletSupply(getTeam()) >= cost; } private void assertHaveBulletCosts(float cost) throws GameActionException{ if(!haveBulletCosts(cost)){ throw new GameActionException(NOT_ENOUGH_RESOURCE, "Not sufficient funds in bullet supply"); } } private void assertNonNegative(float cost) throws GameActionException{ if(cost < 0) { throw new GameActionException(CANT_DO_THAT, "Can't purchase negative victory points"); } } /** * Fires specified bullet spread. Assumes odd number of bullets to fire. * * @param centerDir direction the center bullet should travel * @param toFire number of bullets to fire. * @param spreadDegrees the spread in degrees at which the bullets should fire. 
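* For example, toFire = 5 with spreadDegrees = 15 fires bullets at -30, -15, 0, +15 and +30 degrees relative to centerDir.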
*/ private void fireBulletSpread(Direction centerDir, int toFire, float spreadDegrees){ byte actionType; switch (toFire){ case 5: actionType = Action.FIRE_PENTAD; break; case 3: actionType = Action.FIRE_TRIAD; break; default: actionType = Action.FIRE; } int bulletsPerSide = (toFire - 1) / 2; // Fire center bullet int bulletID = gameWorld.spawnBullet(getTeam(), getType().bulletSpeed, getType().attackPower, getLocation().add(centerDir, getType().bodyRadius + GameConstants.BULLET_SPAWN_OFFSET), centerDir); gameWorld.getMatchMaker().addAction(getID(), actionType, bulletID); // Fire side bullets for(int i = 1; i <= bulletsPerSide; i++){ // Fire left bullet Direction dirLeft = centerDir.rotateLeftDegrees(i * spreadDegrees); bulletID = gameWorld.spawnBullet(getTeam(), getType().bulletSpeed, getType().attackPower, getLocation().add(dirLeft, getType().bodyRadius + GameConstants.BULLET_SPAWN_OFFSET), dirLeft); gameWorld.getMatchMaker().addAction(getID(), actionType, bulletID); // Fire right bullet Direction dirRight = centerDir.rotateRightDegrees(i * spreadDegrees); bulletID = gameWorld.spawnBullet(getTeam(), getType().bulletSpeed, getType().attackPower, getLocation().add(dirRight, getType().bodyRadius + GameConstants.BULLET_SPAWN_OFFSET), dirRight); gameWorld.getMatchMaker().addAction(getID(), actionType, bulletID); } } @Override public boolean canStrike() { boolean correctType = getType() == RobotType.LUMBERJACK; return correctType && !hasAttacked(); } @Override public void strike() throws GameActionException { if(getType() != RobotType.LUMBERJACK){ throw new GameActionException(CANT_DO_THAT, "Only lumberjacks can strike"); } assertIsWeaponReady(); this.robot.incrementAttackCount(); // Striking counts as attack. // Hit adjacent robots for(InternalRobot hitRobot : gameWorld.getObjectInfo().getAllRobotsWithinRadius(getLocation(), RobotType.LUMBERJACK.bodyRadius + GameConstants.LUMBERJACK_STRIKE_RADIUS)){ if(hitRobot.equals(this.robot)){ continue; } hitRobot.damageRobot(getType().attackPower); } // Hit adjacent trees for(InternalTree hitTree : gameWorld.getObjectInfo().getAllTreesWithinRadius(getLocation(), RobotType.LUMBERJACK.bodyRadius + GameConstants.LUMBERJACK_STRIKE_RADIUS)){ hitTree.damageTree(getType().attackPower, getTeam(), false); } gameWorld.getMatchMaker().addAction(getID(), Action.LUMBERJACK_STRIKE, -1); } @Override public boolean canFireSingleShot() { boolean correctType = getType() != RobotType.ARCHON && getType() != RobotType.GARDENER && getType() != RobotType.LUMBERJACK; return correctType && haveBulletCosts(GameConstants.SINGLE_SHOT_COST) && !hasAttacked(); } @Override public boolean canFireTriadShot() { boolean correctType = getType() != RobotType.ARCHON && getType() != RobotType.GARDENER && getType() != RobotType.LUMBERJACK && getType() != RobotType.SCOUT; return correctType && haveBulletCosts(GameConstants.TRIAD_SHOT_COST) && !hasAttacked(); } @Override public boolean canFirePentadShot() { boolean correctType = getType() != RobotType.ARCHON && getType() != RobotType.GARDENER && getType() != RobotType.LUMBERJACK && getType() != RobotType.SCOUT; return correctType && haveBulletCosts(GameConstants.PENTAD_SHOT_COST) && !hasAttacked(); } @Override public void fireSingleShot(Direction dir) throws GameActionException { assertNotNull(dir); assertIsWeaponReady(); if(!canFireSingleShot()){ throw new GameActionException(CANT_DO_THAT, "This robot cannot fire a single shot possibly due to wrong type or " + "insufficient funds"); } this.robot.incrementAttackCount(); 
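// Deduct the single-shot cost from the team's bullet supply, then spawn one bullet with zero spread.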
gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -GameConstants.SINGLE_SHOT_COST); fireBulletSpread(dir, 1, 0); } @Override public void fireTriadShot(Direction dir) throws GameActionException { assertNotNull(dir); assertIsWeaponReady(); if(!canFireTriadShot()){ throw new GameActionException(CANT_DO_THAT, "This robot cannot fire a triad shot possibly due to wrong type or " + "insufficient funds"); } this.robot.incrementAttackCount(); gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -GameConstants.TRIAD_SHOT_COST); fireBulletSpread(dir, 3, GameConstants.TRIAD_SPREAD_DEGREES); } @Override public void firePentadShot(Direction dir) throws GameActionException { assertNotNull(dir); assertIsWeaponReady(); if(!canFirePentadShot()){ throw new GameActionException(CANT_DO_THAT, "This robot cannot fire a pentad shot possibly due to wrong type or " + "insufficient funds"); } this.robot.incrementAttackCount(); gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -GameConstants.PENTAD_SHOT_COST); fireBulletSpread(dir, 5, GameConstants.PENTAD_SPREAD_DEGREES); } private boolean canInteractWithLocation(MapLocation loc){ assertNotNull(loc); return this.robot.canInteractWithLocation(loc); } private boolean canInteractWithCircle(MapLocation center, float radius){ assertNotNull(center); return canInteractWithLocation(center.add(center.directionTo(getLocation()), radius)); } private void assertCanWater() throws GameActionException{ if(!canWater()){ throw new GameActionException(CANT_DO_THAT, "Gardeners can only water once per turn"); } } private void assertCanShake() throws GameActionException{ if(!canShake()){ throw new GameActionException(CANT_DO_THAT, "Robots can only shake one tree per turn"); } } private void assertCanInteractWithTree(MapLocation treeLoc) throws GameActionException{ if(!canInteractWithTree(treeLoc)){ throw new GameActionException(CANT_DO_THAT, "Can't interact with a tree that doesn't exist or is outside" + " this robot's stride."); } } private void assertCanInteractWithTree(int treeID) throws GameActionException{ if(!canInteractWithTree(treeID)){ throw new GameActionException(CANT_DO_THAT, "Can't interact with a tree that doesn't exist or is outside" + " this robot's stride."); } } private void assertOwnedTree(InternalTree tree) throws GameActionException { if(tree.getTeam().equals(Team.NEUTRAL)) { throw new GameActionException(CANT_DO_THAT, "Can't water a neutral tree."); } } @Override public boolean canChop(MapLocation loc) { boolean correctType = (getType() == RobotType.LUMBERJACK); boolean canInteract = canInteractWithTree(loc); return correctType && canInteract; } @Override public boolean canChop(int id) { boolean correctType = (getType() == RobotType.LUMBERJACK); boolean canInteract = canInteractWithTree(id); return correctType && canInteract; } @Override public void chop(MapLocation loc) throws GameActionException { if(getType() != RobotType.LUMBERJACK){ throw new GameActionException(CANT_DO_THAT, "Only lumberjacks can chop"); } assertNotNull(loc); assertIsWeaponReady(); // Chop counts as attack assertCanInteractWithTree(loc); InternalTree tree = gameWorld.getObjectInfo().getTreeAtLocation(loc); chopTree(tree); } @Override public void chop(int id) throws GameActionException { if(getType() != RobotType.LUMBERJACK){ throw new GameActionException(CANT_DO_THAT, "Only lumberjacks can chop"); } assertIsWeaponReady(); // Chop counts as attack assertCanInteractWithTree(id); InternalTree tree = gameWorld.getObjectInfo().getTreeByID(id); chopTree(tree); } private void 
chopTree(InternalTree tree){ this.robot.incrementAttackCount(); // Chopping counts as attack float chopDamage = GameConstants.LUMBERJACK_CHOP_DAMAGE; tree.damageTree(chopDamage, getTeam(), true); gameWorld.getMatchMaker().addAction(getID(), Action.CHOP, -1); } @Override public boolean canShake(MapLocation loc) { return canInteractWithTree(loc) && canShake(); } @Override public boolean canShake(int id) { return canInteractWithTree(id) && canShake(); } @Override public void shake(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanShake(); assertCanInteractWithTree(loc); InternalTree tree = gameWorld.getObjectInfo().getTreeAtLocation(loc); shakeTree(tree); } @Override public void shake(int id) throws GameActionException { assertCanShake(); assertCanInteractWithTree(id); InternalTree tree = gameWorld.getObjectInfo().getTreeByID(id); shakeTree(tree); } private void shakeTree(InternalTree tree){ this.robot.incrementShakeCount(); gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), tree.getContainedBullets()); tree.resetContainedBullets(); gameWorld.getMatchMaker().addAction(getID(), Action.SHAKE_TREE, tree.getID()); } @Override public boolean canWater(MapLocation loc) { return canWater() && canInteractWithTree(loc); } @Override public boolean canWater(int id) { return canWater() && canInteractWithTree(id); } @Override public void water(MapLocation loc) throws GameActionException { assertNotNull(loc); assertCanWater(); assertCanInteractWithTree(loc); InternalTree tree = gameWorld.getObjectInfo().getTreeAtLocation(loc); assertOwnedTree(tree); waterTree(tree); } @Override public void water(int id) throws GameActionException { assertCanWater(); assertCanInteractWithTree(id); InternalTree tree = gameWorld.getObjectInfo().getTreeByID(id); assertOwnedTree(tree); waterTree(tree); } private void waterTree(InternalTree tree){ this.robot.incrementWaterCount(); tree.waterTree(); gameWorld.getMatchMaker().addAction(getID(), Action.WATER_TREE, tree.getID()); } @Override public boolean canWater(){ boolean correctType = getType() == RobotType.GARDENER; return correctType && this.robot.getWaterCount() < 1; } @Override public boolean canShake(){ return this.robot.getShakeCount() < 1; } @Override public boolean canInteractWithTree(MapLocation loc){ assertNotNull(loc); InternalTree tree = gameWorld.getObjectInfo().getTreeAtLocation(loc); return tree != null && canInteractWithCircle(tree.getLocation(), tree.getRadius()); } @Override public boolean canInteractWithTree(int id){ InternalTree tree = gameWorld.getObjectInfo().getTreeByID(id); return tree != null && canInteractWithCircle(tree.getLocation(), tree.getRadius()); } private void assertValidChannel(int channel) throws GameActionException{ if(channel < 0 || channel >= GameConstants.BROADCAST_MAX_CHANNELS){ throw new GameActionException(CANT_DO_THAT, "Broadcasting channel invalid"); } } @Override public void broadcast(int channel, int data) throws GameActionException { assertValidChannel(channel); gameWorld.addBroadcaster(this.robot.getRobotInfo()); gameWorld.getTeamInfo().broadcast(getTeam(), channel, data); } @Override public int readBroadcast(int channel) throws GameActionException { assertValidChannel(channel); return gameWorld.getTeamInfo().readBroadcast(getTeam(), channel); } private void assertCanBuildRobot(RobotType type, Direction dir) throws GameActionException{ if(!canBuildRobot(type, dir)){ throw new GameActionException(CANT_DO_THAT, "Can't build desired robot in given direction, possibly due to " + "insufficient bullet 
supply, this robot can't build, " + "cooldown not expired, or the spawn location is occupied"); } } private void assertCanBuildTree(Direction dir) throws GameActionException{ if(!canPlantTree(dir)){ throw new GameActionException(CANT_DO_THAT, "Can't build a bullet tree in given direction, possibly due to " + "insufficient bullet supply, this robot can't build, " + "cooldown not expired, or the spawn location is occupied"); } } @Override public boolean hasRobotBuildRequirements(RobotType type) { assertNotNull(type); boolean hasBulletCosts = haveBulletCosts(type.bulletCost); boolean validBuilder = getType() == type.spawnSource; return hasBulletCosts && validBuilder; } @Override public boolean hasTreeBuildRequirements() { boolean hasBulletCosts = haveBulletCosts(GameConstants.BULLET_TREE_COST); boolean validBuilder = getType() == RobotType.GARDENER; return hasBulletCosts && validBuilder; } @Override public boolean canBuildRobot(RobotType type, Direction dir) { assertNotNull(type); assertNotNull(dir); boolean hasBuildRequirements = hasRobotBuildRequirements(type); float spawnDist = getType().bodyRadius + GameConstants.GENERAL_SPAWN_OFFSET + type.bodyRadius; MapLocation spawnLoc = getLocation().add(dir, spawnDist); boolean isClear = gameWorld.getGameMap().onTheMap(spawnLoc, type.bodyRadius) && gameWorld.getObjectInfo().isEmpty(spawnLoc, type.bodyRadius); boolean cooldownExpired = isBuildReady(); return hasBuildRequirements && isClear && cooldownExpired; } @Override public boolean canPlantTree(Direction dir) { assertNotNull(dir); boolean hasBuildRequirements = hasTreeBuildRequirements(); float spawnDist = getType().bodyRadius + GameConstants.GENERAL_SPAWN_OFFSET + GameConstants.BULLET_TREE_RADIUS; MapLocation spawnLoc = getLocation().add(dir, spawnDist); boolean isClear = gameWorld.getGameMap().onTheMap(spawnLoc, GameConstants.BULLET_TREE_RADIUS) && gameWorld.getObjectInfo().isEmpty(spawnLoc, GameConstants.BULLET_TREE_RADIUS); boolean cooldownExpired = isBuildReady(); return hasBuildRequirements && isClear && cooldownExpired; } @Override public boolean canHireGardener(Direction dir) { return canBuildRobot(RobotType.GARDENER,dir); } @Override public void hireGardener(Direction dir) throws GameActionException { assertNotNull(dir); assertCanBuildRobot(RobotType.GARDENER, dir); this.robot.setBuildCooldownTurns(RobotType.GARDENER.buildCooldownTurns); gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -RobotType.GARDENER.bulletCost); float spawnDist = getType().bodyRadius + GameConstants.GENERAL_SPAWN_OFFSET + RobotType.GARDENER.bodyRadius; MapLocation spawnLoc = getLocation().add(dir, spawnDist); int robotID = gameWorld.spawnRobot(RobotType.GARDENER, spawnLoc, getTeam()); gameWorld.getMatchMaker().addAction(getID(), Action.SPAWN_UNIT, robotID); } @Override public void buildRobot(RobotType type, Direction dir) throws GameActionException { assertNotNull(dir); assertCanBuildRobot(type, dir); this.robot.setBuildCooldownTurns(type.buildCooldownTurns); gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -type.bulletCost); float spawnDist = getType().bodyRadius + GameConstants.GENERAL_SPAWN_OFFSET + type.bodyRadius; MapLocation spawnLoc = getLocation().add(dir, spawnDist); int robotID = gameWorld.spawnRobot(type, spawnLoc, getTeam()); gameWorld.getMatchMaker().addAction(getID(), Action.SPAWN_UNIT, robotID); } @Override public void plantTree(Direction dir) throws GameActionException { assertNotNull(dir); assertIsBuildReady(); assertCanBuildTree(dir); 
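// Planting starts the tree-construction cooldown, charges the bullet-tree cost, and spawns the tree just beyond this robot's body radius.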
this.robot.setBuildCooldownTurns(GameConstants.BULLET_TREE_CONSTRUCTION_COOLDOWN); gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -GameConstants.BULLET_TREE_COST); float spawnDist = getType().bodyRadius + GameConstants.GENERAL_SPAWN_OFFSET + GameConstants.BULLET_TREE_RADIUS; MapLocation spawnLoc = getLocation().add(dir, spawnDist); int treeID = gameWorld.spawnTree(getTeam(), GameConstants.BULLET_TREE_RADIUS, spawnLoc, 0, null); gameWorld.getMatchMaker().addAction(getID(), Action.PLANT_TREE, treeID); } private void assertCanInteractWithRobot(MapLocation robotLoc) throws GameActionException{ if(!canInteractWithRobot(robotLoc)){ throw new GameActionException(CANT_DO_THAT, "Can't interact with a robot that doesn't exist or is outside" + " this robot's stride."); } } private void assertCanInteractWithRobot(int robotID) throws GameActionException{ if(!canInteractWithRobot(robotID)){ throw new GameActionException(CANT_DO_THAT, "Can't interact with a robot that doesn't exist or is outside" + " this robot's stride."); } } @Override public void donate(float bullets) throws GameActionException{ assertNonNegative(bullets); assertHaveBulletCosts(bullets); int gainedVictorPoints = (int)bullets / GameConstants.BULLET_EXCHANGE_RATE; gameWorld.getTeamInfo().adjustBulletSupply(getTeam(), -bullets); gameWorld.getTeamInfo().adjustVictoryPoints(getTeam(), gainedVictorPoints); } @Override public boolean canInteractWithRobot(MapLocation loc){ assertNotNull(loc); InternalRobot robot = gameWorld.getObjectInfo().getRobotAtLocation(loc); return robot != null && canInteractWithCircle(robot.getLocation(), robot.getType().bodyRadius); } @Override public boolean canInteractWithRobot(int id){ InternalRobot robot = gameWorld.getObjectInfo().getRobotByID(id); return robot != null && canInteractWithCircle(robot.getLocation(), robot.getType().bodyRadius); } @Override public void disintegrate(){ throw new RobotDeathException(); } @Override public void resign(){ gameWorld.getObjectInfo().eachRobot((robot) -> { if(robot.getTeam() == getTeam()){ gameWorld.destroyRobot(robot.getID()); } return true; }); } @Override public void setIndicatorDot(MapLocation loc, int red, int green, int blue) throws GameActionException { assertNotNull(loc); if (!gameWorld.getGameMap().onTheMap(loc)) { throw new GameActionException(OUT_OF_RANGE, "Location is not on the map"); } gameWorld.getMatchMaker().addIndicatorDot(getID(), loc, red, green, blue); } @Override public void setIndicatorLine(MapLocation startLoc, MapLocation endLoc, int red, int green, int blue) throws GameActionException { assertNotNull(startLoc); assertNotNull(endLoc); if (!gameWorld.getGameMap().onTheMap(startLoc) || !gameWorld.getGameMap().onTheMap(endLoc)) { throw new GameActionException(OUT_OF_RANGE, "Start or end location is not on the map"); } gameWorld.getMatchMaker().addIndicatorLine(getID(), startLoc, endLoc, red, green, blue); } @Override public void setTeamMemory(int index, long value) { gameWorld.getTeamInfo().setTeamMemory(robot.getTeam(), index, value); } @Override public void setTeamMemory(int index, long value, long mask) { gameWorld.getTeamInfo().setTeamMemory(robot.getTeam(), index, value, mask); } @Override public long[] getTeamMemory() { long[] arr = gameWorld.getTeamInfo().getOldTeamMemory()[robot.getTeam().ordinal()]; return Arrays.copyOf(arr, arr.length); } @Override public long getControlBits() { return robot.getControlBits(); } }
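/*
 * Editor's note: a minimal, illustrative player-side sketch (not part of the engine source)
 * showing how the RobotController methods implemented above are typically driven from a
 * player thread. The Direction(float radians) constructor is assumed from battlecode.common.
 */
class RobotControllerUsageSketch {
    static void runOneTurn(RobotController rc) throws GameActionException {
        Direction east = new Direction(0f); // 0 radians; assumed to mean "due east"
        // Check-then-act: the can* queries mirror the asserts inside move()/fireSingleShot().
        if (!rc.hasMoved() && rc.canMove(east)) {
            rc.move(east);
        }
        if (rc.canFireSingleShot()) {
            rc.fireSingleShot(east);
        }
    }
}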
package org.nutz.el.impl.loader; import java.io.IOException; import java.io.Reader; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; import org.nutz.el.El; import org.nutz.el.ElOperator; import org.nutz.el.ElSymbol; import org.nutz.el.ElSymbolType; import org.nutz.el.ann.OptHidden; import org.nutz.el.opt.AbstractOperator; import org.nutz.lang.Lang; import org.nutz.resource.Scans; public class OptSymbolLoader extends AbstractSymbolLoader { private OptNode root; private OptNode cursor; private static List<Class<?>> optTypes = new ArrayList<Class<?>>(); @SuppressWarnings("unchecked") public OptSymbolLoader() { root = new OptNode(); if (optTypes.size() == 0) { synchronized (optTypes) { if (optTypes.size() == 0) { List<Class<?>> optTypes = Scans.me().scanPackage(AbstractOperator.class); for (Class<?> optType : optTypes) { if (!Modifier.isAbstract(optType.getModifiers()) && ElOperator.class.isAssignableFrom(optType) && null == optType.getAnnotation(OptHidden.class)) { OptSymbolLoader.optTypes.add(optType); } } } } } for (Class<?> optType : optTypes) { Class<? extends ElOperator> theType = (Class<? extends ElOperator>) optType; ElOperator optObj = El.opt(theType); char[] cs = optObj.getString().toCharArray(); OptNode on = root; for (char c : cs) { on = on.addNode(c); } if (on.getOperator() != null) { throw Lang.makeThrow( "Operator '%s' and '%s' has duplicate '@Opt'", on.getOperator().getClass().getName(), optType.getName()); } on.setOperator(optObj); } } public boolean isMyTurn(ElSymbol prev, int c) { if (prev != null && prev.getType() == ElSymbolType.OPT) return false; cursor = root.getChild((char) c); return null != cursor; } public int load(Reader reader) throws IOException { int c; while (-1 != (c = reader.read())) { OptNode on = cursor.getChild((char) c); if (null == on) { break; } else { cursor = on; } } symbol = new ElSymbol().setType(ElSymbolType.OPT).setObj(cursor.getOperator()); return c; } }
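/*
 * Editor's note (illustrative, not part of the nutz source): the loader above walks a
 * character trie of operator strings. For input ">=", isMyTurn() finds the root child for
 * '>', and load() keeps descending while a matching child exists, so the two-character
 * operator wins over the one-character '>'; the first character with no matching child is
 * returned to the caller.
 */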
package org.ndexbio.common.persistence; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.google.common.io.Files; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.List; import org.ndexbio.cxio.metadata.MetaDataCollection; /** * Contains some utility functions needed by the tests * @author churas */ public class TestUtil { /** * Creates ndex.properties config as a string * @param ndexrootpath value to set for NdexRoot= * @return */ public static String getConfigAsString(final String ndexrootpath) { StringBuilder sb = new StringBuilder(); sb.append("NdexDBURL=somedburl\n"); sb.append("NdexSystemUser=sysuser\n"); sb.append("NdexSystemUserPassword=hithere\n"); sb.append("NdexRoot="); sb.append(ndexrootpath); sb.append("\nHostURI=http://localhost\n"); return sb.toString(); } /** * Writes configuration file to path specified with root path specified * @param outPath * @param ndexrootpath * @throws IOException */ public static void writeSimpleConfigToFile(final String outPath, final String ndexrootpath) throws IOException { try (BufferedWriter bw = new BufferedWriter(new FileWriter(outPath))){ bw.write(TestUtil.getConfigAsString(ndexrootpath)); bw.flush(); } } /** * Copies aspect files from resource path passed in to destPath directory * @param resourceDirName resource path ie /wntsingaling * @param destPath destination directory * @throws Exception */ public static void copyNetworkAspects(Class theClass, final String resourceDirName, List<String> aspects, final String destPath) throws Exception { File aspectDir = new File(destPath + File.separator + CXNetworkLoader.CX1AspectDir); aspectDir.mkdirs(); for (String aspectName : aspects){ try { String filePath = theClass.getResource(resourceDirName + "/" + aspectName).getFile(); File srcFile = new File(filePath); Files.copy(srcFile, new File(aspectDir.getAbsolutePath() + File.separator + srcFile.getName())); } catch(NullPointerException npe){ // ignore cases where a resource is missing, the unit test will // probably catch it and some datasets are missing the files // intentionally } } } /** * Loads resource passed in as a metadatacollection * @param metaDataResource resource to load ie /wntsignaling/metadata * @return MetaDataCollection object * @throws Exception */ public static MetaDataCollection getNetworkMetaData(Class theClass, final String metaDataResource) throws Exception { JsonFactory jf = new JsonFactory(); JsonParser jp = jf.createParser(new File(theClass.getResource(metaDataResource).getFile())); return MetaDataCollection.createInstanceFromJson(jp); } }
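/*
 * Editor's note: a small illustrative sketch (not part of the original test utilities)
 * showing the intended use of getConfigAsString and writeSimpleConfigToFile; the file
 * name "ndex.properties" is assumed.
 */
class TestUtilUsageSketch {
    public static void main(String[] args) throws IOException {
        File ndexRoot = Files.createTempDir(); // Guava helper, already imported above
        File config = new File(ndexRoot, "ndex.properties");
        TestUtil.writeSimpleConfigToFile(config.getAbsolutePath(), ndexRoot.getAbsolutePath());
        System.out.println(TestUtil.getConfigAsString(ndexRoot.getAbsolutePath()));
    }
}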
package org.openid4java.message.ax; import org.openid4java.message.MessageException; import org.openid4java.message.Parameter; import org.openid4java.message.ParameterList; import java.net.URL; import java.net.MalformedURLException; import java.util.Map; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import org.apache.log4j.Logger; /** * Implements the extension for Attribute Exchange fetch requests. * * @see AxMessage Message * @author Marius Scurtescu, Johnny Bufu */ public class FetchRequest extends AxMessage { private static Logger _log = Logger.getLogger(FetchRequest.class); private static final boolean DEBUG = _log.isDebugEnabled(); /** * Constructs a Fetch Request with an empty parameter list. */ protected FetchRequest() { _parameters.set(new Parameter("mode", "fetch_request")); if (DEBUG) _log.debug("Created empty fetch request."); } /** * Constructs a Fetch Request with an empty parameter list. */ public static FetchRequest createFetchRequest() { return new FetchRequest(); } /** * Constructs a FetchRequest from a parameter list. * <p> * The parameter list can be extracted from a received message with the * getExtensionParams method of the Message class, and MUST NOT contain * the "openid.<extension_alias>." prefix. */ protected FetchRequest(ParameterList params) { _parameters = params; } /** * Constructs a FetchRequest from a parameter list. * <p> * The parameter list can be extracted from a received message with the * getExtensionParams method of the Message class, and MUST NOT contain * the "openid.<extension_alias>." prefix. */ public static FetchRequest createFetchRequest(ParameterList params) throws MessageException { FetchRequest req = new FetchRequest(params); if (! req.isValid()) throw new MessageException("Invalid parameters for a fetch request"); if (DEBUG) _log.debug("Created fetch request from parameter list:\n" + params); return req; } /** * Adds an attribute to the fetch request. * * @param alias The attribute alias that will be associated * with the attribute type URI * @param typeUri The attribute type URI * @param required If true, marks the attribute as 'required'; * 'if_available' otherwise. * @param count The number of attribute values requested. * 0 for the special value "unlimited". */ public void addAttribute(String alias, String typeUri, boolean required, int count) throws MessageException { if ( alias.indexOf(',') > -1 || alias.indexOf('.') > -1 || alias.indexOf(':') > -1 || alias.indexOf('\n') > -1 ) throw new MessageException( "Characters [.,:\\n] are not allowed in attribute aliases: " + alias); _parameters.set(new Parameter("type." + alias, typeUri)); String level = required ? "required" : "if_available"; Parameter levelParam = _parameters.getParameter(level); Parameter newParam; if (levelParam == null) { newParam = new Parameter(level, alias); } else { newParam = new Parameter(level, levelParam.getValue() + "," + alias); _parameters.removeParameters(level); } _parameters.set(newParam); setCount(alias, count); if (DEBUG) _log.debug("Added new attribute to fetch request; type: " + typeUri + " alias: " + alias + " count: " + count + " required: " + required); } /** * Adds an attribute to the fetch request, with a default value-count of 1. 
* * @see #addAttribute(String, String, boolean, int) */ public void addAttribute(String alias, String typeUri, boolean required) throws MessageException { addAttribute(alias, typeUri, required, 1); }
/** * Sets the desired number of attribute values requested for the specified * attribute alias. Special value 0 means "unlimited". * * @param alias The attribute alias. */ public void setCount(String alias, int count) { if (count == 0) _parameters.set(new Parameter("count." + alias, "unlimited")); else if (count > 1) _parameters.set( new Parameter("count." + alias, Integer.toString(count))); }
/** * Returns the number of values requested for the specified attribute alias. * 1 (the default number) is returned if the count parameter is absent. * 0 is returned if the special value "unlimited" was requested. * * @param alias The attribute alias. */ public int getCount(String alias) { if ("unlimited".equals(_parameters.getParameterValue("count." + alias))) return 0; else if (_parameters.hasParameter("count." + alias)) return Integer.parseInt(_parameters.getParameterValue("count." + alias)); else return 1; }
/** * Sets the optional 'update_url' parameter where the OP can later re-post * fetch-response updates to the values of the requested attributes. * * @param updateUrl The URL where the RP accepts later updates * to the requested attributes. */ public void setUpdateUrl(String updateUrl) throws MessageException { try { new URL(updateUrl); } catch (MalformedURLException e) { throw new MessageException("Invalid update_url: " + updateUrl); } if (DEBUG) _log.debug("Setting fetch request update_url: " + updateUrl); _parameters.set(new Parameter("update_url", updateUrl)); }
/** * Gets the optional 'update_url' parameter if available, or null otherwise. */ public String getUpdateUrl() { return _parameters.hasParameter("update_url") ? _parameters.getParameterValue("update_url") : null; }
/** * Returns a map with the requested attributes. * * @param required If set to true the list of 'required' attributes * is returned, otherwise the list of 'if_available' * attributes. * @return Map of attribute aliases -> attribute type URIs. */ public Map getAttributes(boolean required) { HashMap attributes = new LinkedHashMap(); String level = required ? "required" : "if_available"; Parameter param = _parameters.getParameter(level); if (param != null) { String[] values = param.getValue().split(","); for (int i = 0; i < values.length; i++) { String alias = values[i]; attributes.put(alias, _parameters.getParameterValue("type." + alias)); } } return attributes; }
/** * Gets all requested attributes (required and optional). * * @return Map of attribute aliases -> attribute type URIs. */ public Map getAttributes() { Map attributes = getAttributes(true); attributes.putAll(getAttributes(false)); return attributes; }
/** * Checks the validity of the extension. * <p> * Used when constructing an extension from a parameter list. * * @return True if the extension is valid, false otherwise. */ public boolean isValid() { if ( ! _parameters.hasParameter("required") && ! _parameters.hasParameter("if_available") ) { _log.warn("One of 'required' or 'if_available' parameters must be present."); return false; } if ( ! _parameters.hasParameter("mode") || !
"fetch_request".equals(_parameters.getParameterValue("mode"))) { _log.warn("Invalid mode value in fetch_request: " + _parameters.getParameterValue("mode")); return false; } if (_parameters.hasParameter("required")) { String[] aliases = _parameters.getParameterValue("required").split(","); for (int i = 0; i < aliases.length; i++) { String alias = aliases[i]; if ( ! _parameters.hasParameter("type." + alias) ) { _log.warn("Type missing for attribute alias: " + alias); return false; } if (! checkCount(alias)) return false; } } if ( _parameters.hasParameter("if_available")) { String[] aliases = _parameters.getParameterValue("if_available").split(","); for (int i = 0; i < aliases.length; i++) { String alias = aliases[i]; if ( ! _parameters.hasParameter("type." + alias) ) { _log.warn("Type missing for attribute alias: " + alias); return false; } if (! checkCount(alias)) return false; } } Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (! paramName.equals("mode") && ! paramName.startsWith("type.") && ! paramName.startsWith("count.") && ! paramName.equals("required") && ! paramName.equals("if_available") && ! paramName.equals("update_url")) { _log.warn("Invalid parameter name in fetch request: " + paramName); //return false; } } return true; } private boolean checkCount(String alias) { int count = getCount(alias); if ( count < 0 || ( count == 0 && ! "unlimited".equals(_parameters.getParameterValue("count." + alias))) ) { _log.warn("Invalid value for count." + alias + ": " + _parameters.getParameterValue("count." + alias)); return false; } return true; } }
package gov.nih.nci.caxchange.ctom.viewer.actions; import gov.nih.nci.c3d.webservices.client.C3DGridServiceClient; import gov.nih.nci.cagrid.caxchange.client.CaXchangeRequestProcessorClient; import gov.nih.nci.cagrid.caxchange.context.stubs.CaXchangeResponseServicePortType; import gov.nih.nci.cagrid.caxchange.context.stubs.GetResponseRequest; import gov.nih.nci.cagrid.caxchange.context.stubs.GetResponseResponse; import gov.nih.nci.cagrid.caxchange.context.stubs.service.CaXchangeResponseServiceAddressingLocator; import gov.nih.nci.cagrid.caxchange.context.stubs.types.CaXchangeResponseServiceReference; import gov.nih.nci.cagrid.common.Utils; import gov.nih.nci.caxchange.Credentials; import gov.nih.nci.caxchange.Message; import gov.nih.nci.caxchange.MessagePayload; import gov.nih.nci.caxchange.MessageTypes; import gov.nih.nci.caxchange.Metadata; import gov.nih.nci.caxchange.Request; import gov.nih.nci.caxchange.ResponseMessage; import gov.nih.nci.caxchange.ctom.viewer.constants.DisplayConstants; import gov.nih.nci.caxchange.ctom.viewer.constants.ForwardConstants; import gov.nih.nci.caxchange.ctom.viewer.forms.LabActivitiesSearchResultForm; import gov.nih.nci.caxchange.ctom.viewer.forms.LoginForm; import gov.nih.nci.caxchange.ctom.viewer.viewobjects.LabActivityResult; import gov.nih.nci.labhub.domain.II; import gov.nih.nci.logging.api.user.UserInfoHelper; import java.io.InputStream; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.StringTokenizer; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.axis.message.MessageElement; import org.apache.axis.message.addressing.EndpointReferenceType; import org.apache.axis.types.URI; import org.apache.log4j.Logger; import org.apache.struts.action.Action; import org.apache.struts.action.ActionError; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import org.w3c.dom.Document; import webservices.Documentation; import webservices.LabResult; import webservices.LoadLabsRequest; import webservices.Participant; import webservices.PerformedActivity; import webservices.PerformedStudy; import webservices.StudySubject; /** * This class performs the Load to CTMS action. It loads the selected form data to CTMS. * It checks if valid login information is in session; if not it redirects the user to login page. 
* * @author asharma * */ public class LoadToCTMSAction extends Action { private static final Logger logDB = Logger.getLogger(LoadToCTMSAction.class); /* (non-Javadoc) * @see org.apache.struts.action.Action#execute(org.apache.struts.action.ActionMapping, org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ActionErrors errors = new ActionErrors(); ActionMessages messages = new ActionMessages(); HttpSession session = request.getSession(); LabActivitiesSearchResultForm lForm = (LabActivitiesSearchResultForm) form; // if the session is new or the login object is null, redirect the user to the login page if (session.isNew() || (session.getAttribute(DisplayConstants.LOGIN_OBJECT) == null)) { logDB.error("No Session or User Object Forwarding to the Login Page"); return mapping.findForward(ForwardConstants.LOGIN_PAGE); } String username = ((LoginForm)session.getAttribute(DisplayConstants.LOGIN_OBJECT)).getLoginId(); UserInfoHelper.setUserInfo(username, session.getId()); try { //calls the loadToCTMS method loadToCTMS(request, lForm, username); messages.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage(DisplayConstants.MESSAGE_ID, "Messages Submitted to CTMS Successfully")); saveMessages( request, messages ); } catch (Exception cse) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError(DisplayConstants.ERROR_ID, "Error in Submitting Messages to CTMS")); saveErrors( request,errors ); logDB.error("Error sending labs to CTMS", cse); } session.setAttribute(DisplayConstants.CURRENT_FORM, lForm); // if the login is valid and the selected form data is successfully loaded to CTMS, // return to the search results page and display the load-successful message return (mapping.findForward(ForwardConstants.LOAD_TO_CTMS_EVENT_SUCCESS)); } /** * Collects the selected form data and submits it to CTMS via the caXchange request processor. * @param request * @param form * @param username * @throws Exception */ private void loadToCTMS(HttpServletRequest request,ActionForm form, String username) throws Exception { LabActivitiesSearchResultForm lForm = (LabActivitiesSearchResultForm)form; HashMap map = (HashMap) request.getSession().getAttribute("RESULT_SET"); ArrayList list = new ArrayList(); String test = lForm.getRecordId(); StringTokenizer stringTokenizer = new StringTokenizer(test, ","); int count = stringTokenizer.countTokens(); // Create the list of results to send if (count >= 1) { while (stringTokenizer.hasMoreTokens()) { list.add(map.get(stringTokenizer.nextToken())); } } else { list.add(map.get(lForm.getRecordId())); } // Then create the request //C3DGridServiceClient client = new C3DGridServiceClient(url); String url = "http://cbvapp-d1017.nci.nih.gov:18080/wsrf/services/cagrid/CaXchangeRequestProcessor"; CaXchangeRequestProcessorClient client = new CaXchangeRequestProcessorClient(url); LoadLabsRequest labRequest = new LoadLabsRequest(); // Then for each lab selected set the lab information LabResult labResults[]= new LabResult[list.size()]; int i = 0; for (Iterator labs = list.iterator(); labs.hasNext();) { LabActivityResult lab = (LabActivityResult)labs.next(); // Populate the study information Documentation documentation = new Documentation(); PerformedStudy performedStudy = new PerformedStudy(); String studyId = lab.getStudyId(); if (studyId != null) { // Set the study identifier on the document
webservices.II ii = new webservices.II(); ii.setExtension("STUDY:" + studyId); ii.setAssigningAuthorityName("CTODS"); ii.setRoot("C3D"); webservices.II[] iis = new webservices.II[1]; iis[0] = ii; documentation.setII(iis); } Documentation[] docs = new Documentation[1]; docs[0] = documentation; performedStudy.setDocumentation(docs); // Then set the participant and study subject assignment identifiers Participant participant= new Participant(); StudySubject studySubject= new StudySubject(); Collection<II> studySubjectIds = lab.getSubjectAssignment().getStudySubjectIdentifier(); if (studySubjectIds != null && studySubjectIds.size() > 0) { Iterator<II> idIterator = studySubjectIds.iterator(); II ssII = idIterator.next(); webservices.II ii = new webservices.II(); ii.setAssigningAuthorityName("CTODS"); ii.setRoot("C3D"); ii.setExtension("MRN:" + ssII.getExtension()); webservices.II[] iis = new webservices.II[1]; iis[0] = ii; participant.setII(iis); webservices.II ii2 = new webservices.II(); ii2.setAssigningAuthorityName("CTODS"); ii2.setRoot("C3D"); ii2.setExtension("PATIENTPOSITION:" + ssII.getExtension()); webservices.II[] iis2 = new webservices.II[1]; iis2[0] = ii2; studySubject.setII(iis2); } studySubject.setParticipant(participant); studySubject.setPerformedStudy(performedStudy); // Set the activity name PerformedActivity performedActivity= new PerformedActivity(); String testName = lab.getLabTestId(); performedActivity.setName(testName); PerformedActivity[] performedActivitys = new PerformedActivity[1]; performedActivitys[0] = performedActivity; studySubject.setPerformedActivity(performedActivitys); // Then set the lab result LabResult labResult = new LabResult(); labResult.setStudySubject(studySubject); // Set the reported date Date labDate = lab.getActualDate(); if (labDate != null) { Calendar cal = Calendar.getInstance(); cal.setTime(labDate); labResult.setReportedDateTime(cal); } // Set the lab result details String numResult = lab.getNumericResult(); if ((numResult != null) && (!numResult.equals(""))) labResult.setNumericResult(Float.parseFloat(numResult)); String txtResult = lab.getTextResult(); if ((txtResult != null) && (!txtResult.equals(""))) labResult.setTextResult(txtResult); String labUom = lab.getUnitOfMeasure(); if (labUom != null) labResult.setNumericUnit(labUom); String lowRange = lab.getLowRange(); if (lowRange != null) labResult.setReferenceRangeLow(Float.parseFloat(lowRange)); String highRange = lab.getHighRange(); if (highRange != null) labResult.setReferenceRangeHigh(Float.parseFloat(highRange)); labResults[i] = labResult; i++; } labRequest.setLabResult(labResults); PrintWriter writer = new PrintWriter("c3dmessage.xml"); QName lab = new QName("LoadLabsRequest"); Utils.serializeObject(labRequest, lab, writer); // Create the caxchange message Message requestMessage = new Message(); Metadata metadata = new Metadata(); metadata.setExternalIdentifier("CTODS"); Credentials creds = new Credentials(); creds.setUserName(username); metadata.setCredentials(creds); metadata.setMessageType(MessageTypes.LOAD_LAB_TO_CDMS); requestMessage.setMetadata(metadata); Request caxchangeRequest = new Request(); requestMessage.setRequest(caxchangeRequest); MessagePayload messagePayload = new MessagePayload(); URI uri = new URI(); uri.setPath("gme://ccts.cabig/1.0/gov.nih.nci.cabig.ccts.domain"); messagePayload.setXmlSchemaDefinition(uri); MessageElement messageElement = new MessageElement(lab, labRequest); messagePayload.set_any(new MessageElement[]{messageElement}); 
requestMessage.getRequest().setBusinessMessagePayload(messagePayload); CaXchangeResponseServiceReference crsr = client.processRequestAsynchronously(requestMessage); EndpointReferenceType endPointReference = crsr.getEndpointReference(); CaXchangeResponseServiceAddressingLocator locator = new CaXchangeResponseServiceAddressingLocator(); CaXchangeResponseServicePortType responsePort = locator.getCaXchangeResponseServicePortTypePort(endPointReference); boolean gotResponse=false; GetResponseResponse getResponse=null; int responseCount = 0; ResponseMessage responseMessage = null; while(!gotResponse) { try { getResponse = responsePort.getResponse(new GetResponseRequest()); gotResponse = true; responseMessage = getResponse.getCaXchangeResponseMessage(); } catch (Exception e) { logDB.info("No response from caxchange", e); responseCount++; if (responseCount > 50) { logDB.error("Never got a response from caxchange hub"); throw new Exception("Error sending to CTMS"); } Thread.sleep(1000); } } // Now send the load labs request /* webservices.Acknowledgement acknowledgement = client.loadLabs(labRequest); logDB.info("Load acknowledgement was " + acknowledgement);*/ lForm.setRecordId(""); lForm.setRecordId(null); } }
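// A minimal, hypothetical sketch (not part of the action above) isolating the poll-with-retry
// pattern used when waiting for the asynchronous caXchange response: retry a call up to a fixed
// number of attempts, sleeping between failures. The names ResponsePollingSketch, pollWithRetry,
// maxAttempts and sleepMillis are illustrative assumptions, not existing API.
class ResponsePollingSketch {
    static <T> T pollWithRetry(java.util.concurrent.Callable<T> call, int maxAttempts, long sleepMillis)
            throws Exception {
        Exception last = null;
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            try {
                return call.call();      // success: return the response
            } catch (Exception e) {
                last = e;                // remember the failure and retry after a pause
                Thread.sleep(sleepMillis);
            }
        }
        throw new Exception("No response after " + maxAttempts + " attempts", last);
    }
}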
package se.sics.mspsim.core; import java.io.PrintStream; import java.util.ArrayList; import se.sics.mspsim.core.EmulationLogger.WarningMode; import se.sics.mspsim.util.ComponentRegistry; import se.sics.mspsim.util.MapEntry; import se.sics.mspsim.util.MapTable; import se.sics.mspsim.util.Utils; /** * The CPU of the MSP430 */ public class MSP430Core extends Chip implements MSP430Constants { public static final int RETURN = 0x4130; public static final boolean DEBUG = false; public static final boolean debugInterrupts = false; public static final boolean EXCEPTION_ON_BAD_OPERATION = true; // Try it out with 64 k memory public static final int MAX_MEM = 64*1024; public static final int MAX_MEM_IO = 0x200; public static final int PORTS = 6; // 16 registers of which some are "special" - PC, SP, etc. public int[] reg = new int[16]; public CPUMonitor[] regWriteMonitors = new CPUMonitor[16]; public CPUMonitor[] regReadMonitors = new CPUMonitor[16]; // For breakpoints, etc... how should memory monitors be implemented? // Maybe monitors should have a "next" pointer...? or just have a [][]? public CPUMonitor[] breakPoints = new CPUMonitor[MAX_MEM]; // true => breakpoints can occur! boolean breakpointActive = true; public int memory[] = new int[MAX_MEM]; public long cycles = 0; public long cpuCycles = 0; MapTable map; // Most HW needs only notify write and clocking, others need also read... // For notify write... public IOUnit[] memOut = new IOUnit[MAX_MEM_IO]; // For notify read... -> which will happen before actual read! public IOUnit[] memIn = new IOUnit[MAX_MEM_IO]; private IOUnit[] ioUnits; private SFR sfr; // From the possible interrupt sources - to be able to indicate is serviced. private InterruptHandler interruptSource[] = new IOUnit[16]; private int interruptMax = -1; // Op/instruction represents the last executed OP / instruction private int op; public int instruction; int servicedInterrupt = -1; InterruptHandler servicedInterruptUnit = null; private boolean interruptsEnabled = false; protected boolean cpuOff = false; // Not private since they are needed (for fast access...) protected int dcoFrq = 2500000; int aclkFrq = 32768; int smclkFrq = dcoFrq; long lastCyclesTime = 0; long lastVTime = 0; long currentTime = 0; long lastMicrosDelta; double currentDCOFactor = 1.0; // Clk A can be "captured" by timers - needs to be handled close to CPU...? private int clkACaptureMode = CLKCAPTURE_NONE; // Other clocks too... long nextEventCycles; private EventQueue vTimeEventQueue = new EventQueue(); private long nextVTimeEventCycles; private EventQueue cycleEventQueue = new EventQueue(); private long nextCycleEventCycles; private BasicClockModule bcs; private ArrayList<Chip> chips = new ArrayList<Chip>(); ComponentRegistry registry; Profiler profiler; private Flash flash; public MSP430Core(int type, ComponentRegistry registry) { // Ignore type for now... setModeNames(MODE_NAMES); this.registry = registry; int passIO = 0; // IOUnits should likely be placed in a hashtable? // Maybe for debugging purposes... 
ioUnits = new IOUnit[PORTS + 7]; Timer ta = new Timer(this, Timer.TIMER_Ax149, memory, 0x160); Timer tb = new Timer(this, Timer.TIMER_Bx149, memory, 0x180); for (int i = 0, n = 0x20; i < n; i++) { memOut[0x160 + i] = ta; memOut[0x180 + i] = tb; memIn[0x160 + i] = ta; memIn[0x180 + i] = tb; } Watchdog wdt = new Watchdog(this); memOut[0x120] = wdt; memIn[0x120] = wdt; /* TODO: this range is only valid for the F1611 series (Sky, etc) */ flash = new Flash(this, memory, new FlashRange(0x4000, 0x10000, 512, 64), new FlashRange(0x1000, 0x01100, 128, 64)); for (int i = 0x128; i < 0x12e; i++) { memOut[i] = flash; memIn[i] = flash; } sfr = new SFR(this, memory); for (int i = 0, n = 0x10; i < n; i++) { memOut[i] = sfr; memIn[i] = sfr; } memIn[Timer.TAIV] = ta; memOut[Timer.TAIV] = ta; memIn[Timer.TBIV] = tb; memOut[Timer.TBIV] = tb; bcs = new BasicClockModule(this, memory, 0, new Timer[] {ta, tb}); for (int i = 0x56, n = 0x59; i < n; i++) { memOut[i] = bcs; } Multiplier mp = new Multiplier(this, memory, 0); // Only cares of writes! for (int i = 0x130, n = 0x13f; i < n; i++) { memOut[i] = mp; memIn[i] = mp; } USART usart0 = new USART(this, memory, 0x70); USART usart1 = new USART(this, memory, 0x78); for (int i = 0, n = 8; i < n; i++) { memOut[0x70 + i] = usart0; memIn[0x70 + i] = usart0; memOut[0x78 + i] = usart1; memIn[0x78 + i] = usart1; } // Add port 1,2 with interrupt capability! ioUnits[0] = new IOPort(this, "1", 4, memory, 0x20); ioUnits[1] = new IOPort(this, "2", 1, memory, 0x28); for (int i = 0, n = 8; i < n; i++) { memOut[0x20 + i] = ioUnits[0]; memOut[0x28 + i] = ioUnits[1]; } // Add port 3,4 & 5,6 for (int i = 0, n = 2; i < n; i++) { ioUnits[i + 2] = new IOPort(this, "" + (3 + i), 0, memory, 0x18 + i * 4); memOut[0x18 + i * 4] = ioUnits[i + 2]; memOut[0x19 + i * 4] = ioUnits[i + 2]; memOut[0x1a + i * 4] = ioUnits[i + 2]; memOut[0x1b + i * 4] = ioUnits[i + 2]; ioUnits[i + 4] = new IOPort(this, "" + (5 + i), 0, memory, 0x30 + i * 4); memOut[0x30 + i * 4] = ioUnits[i + 4]; memOut[0x31 + i * 4] = ioUnits[i + 4]; memOut[0x32 + i * 4] = ioUnits[i + 4]; memOut[0x33 + i * 4] = ioUnits[i + 4]; } passIO = 6; // Basic clock syst. ioUnits[passIO++] = bcs; // Usarts ioUnits[passIO++] = usart0; ioUnits[passIO++] = usart1; // Add the timers ioUnits[passIO++] = ta; ioUnits[passIO++] = tb; ADC12 adc12 = new ADC12(this); ioUnits[passIO++] = adc12; ioUnits[passIO++] = sfr; for (int i = 0, n = 16; i < n; i++) { memOut[0x80 + i] = adc12; memIn[0x80 + i] = adc12; memOut[0x140 + i] = adc12; memIn[0x140 + i] = adc12; memOut[0x150 + i] = adc12; memIn[0x150 + i] = adc12; } for (int i = 0, n = 8; i < n; i++) { memOut[0x1A0 + i] = adc12; memIn[0x1A0 + i] = adc12; } if (DEBUG) System.out.println("Number of passive: " + passIO); } public Profiler getProfiler() { return profiler; } public void setProfiler(Profiler prof) { registry.registerComponent("profiler", prof); profiler = prof; profiler.setCPU(this); } /* returns port 1 ... 6 */ public IOPort getIOPort(int portID) { if (portID > 0 && portID < 7) { return (IOPort) ioUnits[portID - 1]; } return null; } public SFR getSFR() { return sfr; } public void addChip(Chip chip) { chips.add(chip); chip.setEmulationLogger(logger); } public Chip getChip(String name) { for(Chip chip : chips) { if (name.equals(chip.getName())) { return chip; } } return null; } public Chip getChip(Class<? 
extends Chip> type) { for(Chip chip : chips) { if (type.isInstance(chip)) { return chip; } } return null; } public Chip[] getChips() { return chips.toArray(new Chip[chips.size()]); } public void setBreakPoint(int address, CPUMonitor mon) { breakPoints[address] = mon; } public boolean hasBreakPoint(int address) { return breakPoints[address] != null; } public void clearBreakPoint(int address) { breakPoints[address] = null; } public void setRegisterWriteMonitor(int r, CPUMonitor mon) { regWriteMonitors[r] = mon; } public void setRegisterReadMonitor(int r, CPUMonitor mon) { regReadMonitors[r] = mon; } public int[] getMemory() { return memory; } public void writeRegister(int r, int value) { // Before the write! if (regWriteMonitors[r] != null) { regWriteMonitors[r].cpuAction(CPUMonitor.REGISTER_WRITE, r, value); } reg[r] = value; if (r == SR) { boolean oldCpuOff = cpuOff; // if (((value & GIE) == GIE) != interruptsEnabled) { // System.out.println("InterruptEnabled changed: " + !interruptsEnabled); interruptsEnabled = ((value & GIE) == GIE); cpuOff = ((value & CPUOFF) == CPUOFF); if (cpuOff != oldCpuOff) { // System.out.println("LPM CPUOff: " + cpuOff + " cycles: " + cycles); } if (cpuOff) { boolean scg0 = (value & SCG0) == SCG0; boolean scg1 = (value & SCG1) == SCG1; boolean oscoff = (value & OSCOFF) == OSCOFF; if (oscoff && scg1 && scg0) { setMode(MODE_LPM4); } else if (scg1 && scg0){ setMode(MODE_LPM3); } else if (scg1) { setMode(MODE_LPM2); } else if (scg0) { setMode(MODE_LPM1); } else { setMode(MODE_LPM0); } } else { setMode(MODE_ACTIVE); } } } public int readRegister(int r) { if (regReadMonitors[r] != null) { regReadMonitors[r].cpuAction(CPUMonitor.REGISTER_READ, r, reg[r]); } return reg[r]; } public int readRegisterCG(int r, int m) { // CG1 + m == 0 => SR! if ((r == CG1 && m != 0) || r == CG2) { // No monitoring here... 
just return the CG values return CREG_VALUES[r - 2][m]; } if (regReadMonitors[r] != null) { regReadMonitors[r].cpuAction(CPUMonitor.REGISTER_READ, r, reg[r]); } return reg[r]; } public int incRegister(int r, int value) { if (regReadMonitors[r] != null) { regReadMonitors[r].cpuAction(CPUMonitor.REGISTER_READ, r, reg[r]); } if (regWriteMonitors[r] != null) { regWriteMonitors[r].cpuAction(CPUMonitor.REGISTER_WRITE, r, reg[r] + value); } reg[r] += value; return reg[r]; } public void setACLKFrq(int frequency) { aclkFrq = frequency; } public void setDCOFrq(int frequency, int smclkFrq) { dcoFrq = frequency; this.smclkFrq = smclkFrq; // update last virtual time before updating DCOfactor lastVTime = getTime(); lastCyclesTime = cycles; lastMicrosDelta = 0; currentDCOFactor = 1.0 * BasicClockModule.MAX_DCO_FRQ / frequency; // " current: " + frequency + " DCO_FAC = " + currentDCOFactor); if (DEBUG) System.out.println("Set smclkFrq: " + smclkFrq); dcoReset(); } /* called after dcoReset */ protected void dcoReset() { } // returns global time counted in max speed of DCOs (~5Mhz) public long getTime() { long diff = cycles - lastCyclesTime; return lastVTime + (long) (diff * currentDCOFactor); } // Converts a virtual time to a cycles time according to the current // cycle speed private long convertVTime(long vTime) { long tmpTime = lastCyclesTime + (long) ((vTime - lastVTime) / currentDCOFactor); // System.out.println("ConvertVTime: vTime=" + vTime + " => " + tmpTime); return tmpTime; } // get elapsed time in seconds public double getTimeMillis() { return 1000.0 * getTime() / BasicClockModule.MAX_DCO_FRQ; } /** * getCyclesToNext - returns the number of cycles that it will take before next * execution of an event or zero if CPU is running. * @return number of cycles left before next event executes or 0 if CPU is on. */ public long getCyclesToNext() { if (!cpuOff) return 0; return cycles - nextEventCycles; } private void executeEvents() { if (cycles >= nextVTimeEventCycles) { if (vTimeEventQueue.eventCount == 0) { nextVTimeEventCycles = cycles + 10000; } else { TimeEvent te = vTimeEventQueue.popFirst(); long now = getTime(); // if (now > te.time) { // System.out.println("VTimeEvent got delayed by: " + (now - te.time) + " at " + // cycles + " target Time: " + te.time + " class: " + te.getClass().getName()); te.execute(now); if (vTimeEventQueue.eventCount > 0) { nextVTimeEventCycles = convertVTime(vTimeEventQueue.nextTime); } else { nextVTimeEventCycles = cycles + 10000; } } } if (cycles >= nextCycleEventCycles) { if (cycleEventQueue.eventCount == 0) { nextCycleEventCycles = cycles + 10000; } else { TimeEvent te = cycleEventQueue.popFirst(); te.execute(cycles); if (cycleEventQueue.eventCount > 0) { nextCycleEventCycles = cycleEventQueue.nextTime; } else { nextCycleEventCycles = cycles + 10000; } } } // Pick the one with shortest time in the future. nextEventCycles = nextCycleEventCycles < nextVTimeEventCycles ? 
nextCycleEventCycles : nextVTimeEventCycles; } /** * Schedules a new Time event using the cycles counter * @param event * @param time */ public void scheduleCycleEvent(TimeEvent event, long cycles) { long currentNext = cycleEventQueue.nextTime; cycleEventQueue.addEvent(event, cycles); if (currentNext != cycleEventQueue.nextTime) { nextCycleEventCycles = cycleEventQueue.nextTime; if (nextEventCycles > nextCycleEventCycles) { nextEventCycles = nextCycleEventCycles; } } } /** * Schedules a new Time event using the virtual time clock * @param event * @param time */ public void scheduleTimeEvent(TimeEvent event, long time) { long currentNext = vTimeEventQueue.nextTime; vTimeEventQueue.addEvent(event, time); if (currentNext != vTimeEventQueue.nextTime) { // This is only valid when not having a cycle event queue also... // if we have it needs to be checked also! nextVTimeEventCycles = convertVTime(vTimeEventQueue.nextTime); if (nextEventCycles > nextVTimeEventCycles) { nextEventCycles = nextVTimeEventCycles; } /* Warn if someone schedules a time backwards in time... */ if (cycles > nextVTimeEventCycles) { logger.warning(this, "Scheduling time event backwards in time!!!"); throw new IllegalStateException("Cycles are passed desired future time..."); } } } /** * Schedules a new Time event msec milliseconds in the future * @param event * @param time */ public long scheduleTimeEventMillis(TimeEvent event, double msec) { long time = (long) (getTime() + msec / 1000 * BasicClockModule.MAX_DCO_FRQ); // System.out.println("Scheduling at: " + time + " (" + msec + ") getTime: " + getTime()); scheduleTimeEvent(event, time); return time; } // Should also return active units... public IOUnit getIOUnit(String name) { for (int i = 0, n = ioUnits.length; i < n; i++) { if (name.equals(ioUnits[i].getName())) { return ioUnits[i]; } } return null; } private void resetIOUnits() { for (int i = 0, n = ioUnits.length; i < n; i++) { ioUnits[i].reset(RESET_POR); } } private void internalReset() { for (int i = 0, n = 16; i < n; i++) { interruptSource[i] = null; } servicedInterruptUnit = null; servicedInterrupt = -1; interruptMax = -1; writeRegister(SR, 0); cycleEventQueue.removeAll(); vTimeEventQueue.removeAll(); bcs.reset(); // Needs to be last since these can add events... resetIOUnits(); profiler.clearProfile(); } public void setWarningMode(EmulationLogger.WarningMode mode) { if (logger != null) { logger.setWarningMode(mode); } } public void reset() { flagInterrupt(15, null, true); } // Indicate that we have an interrupt now! // We should only get same IOUnit for same interrupt level public void flagInterrupt(int interrupt, InterruptHandler source, boolean triggerIR) { if (triggerIR) { interruptSource[interrupt] = source; if (debugInterrupts ) { if (source != null) { System.out.println("### Interrupt flagged ON by " + source.getName() + " prio: " + interrupt); } else { System.out.println("### Interrupt flagged ON by <null>"); } } // MAX priority is executed first - update max if this is higher! if (interrupt > interruptMax) { interruptMax = interrupt; if (interruptMax == 15) { // This can not be masked at all! 
interruptsEnabled = true; } } } else { if (interruptSource[interrupt] == source) { if (debugInterrupts) { System.out.println("### Interrupt flagged OFF by " + source.getName() + " prio: " + interrupt); } interruptSource[interrupt] = null; reevaluateInterrupts(); } } } private void reevaluateInterrupts() { interruptMax = -1; for (int i = 0; i < interruptSource.length; i++) { if (interruptSource[i] != null) interruptMax = i; } } // returns the currently serviced interrupt (vector ID) public int getServicedInterrupt() { return servicedInterrupt; } // This will be called after an interrupt have been handled // In the main-CPU loop public void handlePendingInterrupts() { // By default no int. left to process... reevaluateInterrupts(); servicedInterrupt = -1; servicedInterruptUnit = null; } // Read method that handles read from IO units! public int read(int address, boolean word) throws EmulationException { int val = 0; // Only word reads at 0x1fe which is highest address... if (address < 0x1ff && memIn[address] != null) { val = memIn[address].read(address, word, cycles); } else { address &= 0xffff; if (flash.addressInFlash(address)) { flash.notifyRead(address); } val = memory[address] & 0xff; if (word) { val |= (memory[(address + 1) & 0xffff] << 8); if ((address & 1) != 0) { printWarning(MISALIGNED_READ, address); } } } if (breakPoints[address] != null) { breakPoints[address].cpuAction(CPUMonitor.MEMORY_READ, address, val); } return val; } public void write(int dstAddress, int dst, boolean word) throws EmulationException { // TODO: optimize memory usage by tagging memory's higher bits. // will also affect below flash write stuff!!! if (breakPoints[dstAddress] != null) { breakPoints[dstAddress].cpuAction(CPUMonitor.MEMORY_WRITE, dstAddress, dst); } // Only word writes at 0x1fe which is highest address... if (dstAddress < 0x1ff && memOut[dstAddress] != null) { if (!word) dst &= 0xff; memOut[dstAddress].write(dstAddress, dst, word, cycles); // } else { // // TODO: add check for Flash / RAM! // memory[dstAddress] = dst & 0xff; // if (word) { // memory[dstAddress + 1] = (dst >> 8) & 0xff; // if ((dstAddress & 1) != 0) { // printWarning(MISALIGNED_WRITE, dstAddress); // check for Flash } else if (flash.addressInFlash(dstAddress)) { flash.flashWrite(dstAddress, dst, word); } else { // assume RAM memory[dstAddress] = dst & 0xff; if (word) { memory[dstAddress + 1] = (dst >> 8) & 0xff; if ((dstAddress & 1) != 0) { printWarning(MISALIGNED_WRITE, dstAddress); } } } } void printWarning(int type, int address) throws EmulationException { String message = null; switch(type) { case MISALIGNED_READ: message = "**** Illegal read - misaligned word from $" + Utils.hex16(address) + " at $" + Utils.hex16(reg[PC]); break; case MISALIGNED_WRITE: message = "**** Illegal write - misaligned word to $" + Utils.hex16(address) + " at $" + Utils.hex16(reg[PC]); break; } if (logger != null && message != null) { logger.warning(this, message); } } public void generateTrace(PrintStream out) { /* Override if a stack trace or other additional warning info should * be printed */ } private int serviceInterrupt(int pc) { int pcBefore = pc; int spBefore = readRegister(SP); int sp = spBefore; int sr = readRegister(SR); if (profiler != null) { profiler.profileInterrupt(interruptMax, cycles); } if (flash.blocksCPU()) { /* TODO: how should this error/warning be handled ?? */ throw new IllegalStateException( "Got interrupt while flash controller blocks CPU. CPU CRASHED."); } // Only store stuff on irq except reset... 
- not sure if this is correct... // TODO: Check what to do if reset is called! if (interruptMax < 15) { // Push PC and SR to stack // store on stack - always move 2 steps (W) even if B. writeRegister(SP, sp = spBefore - 2); // Put lo & hi on stack! memory[sp] = pc & 0xff; memory[sp + 1] = (pc >> 8) & 0xff; writeRegister(SP, sp = sp - 2); // Put lo & hi on stack! memory[sp] = sr & 0xff; memory[sp + 1] = (sr >> 8) & 0xff; } // Clear SR writeRegister(SR, 0); // sr & ~CPUOFF & ~SCG1 & ~OSCOFF); // Jump to the address specified in the interrupt vector writeRegister(PC, pc = memory[0xffe0 + interruptMax * 2] + (memory[0xffe0 + interruptMax * 2 + 1] << 8)); servicedInterrupt = interruptMax; servicedInterruptUnit = interruptSource[servicedInterrupt]; // Flag off this interrupt - for now - as soon as RETI is // executed things might change! reevaluateInterrupts(); if (servicedInterrupt == 15) { internalReset(); } // Interrupts take 6 cycles! cycles += 6; if (debugInterrupts) { System.out.println("### Executing interrupt: " + servicedInterrupt + " at " + pcBefore + " to " + pc + " SP before: " + spBefore); } // And call the serviced routine (which can cause another interrupt) if (servicedInterruptUnit != null) { if (debugInterrupts) { System.out.println("### Calling serviced interrupt on: " + servicedInterruptUnit.getName()); } servicedInterruptUnit.interruptServiced(servicedInterrupt); } return pc; } /* returns true if any instruction was emulated - false if CpuOff */ public boolean emulateOP(long maxCycles) throws EmulationException { //System.out.println("CYCLES BEFORE: " + cycles); int pc = readRegister(PC); long startCycles = cycles; // Interrupt processing [after the last instruction was executed] if (interruptsEnabled && servicedInterrupt == -1 && interruptMax >= 0) { pc = serviceInterrupt(pc); } /* Did not execute any instructions */ if (cpuOff || flash.blocksCPU()) { // System.out.println("Jumping: " + (nextIOTickCycles - cycles)); // nextEventCycles must exist, otherwise CPU can not wake up!? // If CPU is not active we must run the events here!!! // this can trigger interrupts that wake the CPU // Event processing while (cycles >= nextEventCycles) { executeEvents(); } if (maxCycles >= 0 && maxCycles < nextEventCycles) { // Should it just freeze or take on extra cycle step if cycles > max? cycles = cycles < maxCycles ? maxCycles : cycles; } else { cycles = nextEventCycles; } return false; } // This is quite costly... should probably be made more // efficiently if (breakPoints[pc] != null) { if (breakpointActive) { breakPoints[pc].cpuAction(CPUMonitor.BREAK, pc, 0); breakpointActive = false; return false; } else { // Execute this instruction - this is second call... breakpointActive = true; } } instruction = memory[pc] + (memory[pc + 1] << 8); op = instruction >> 12; int sp = 0; int sr = 0; boolean word = (instruction & 0x40) == 0; // Destination vars int dstRegister = 0; int dstAddress = -1; boolean dstRegMode = false; int dst = 0; boolean write = false; boolean updateStatus = true; // When is PC increased probably immediately (e.g. here)? pc += 2; writeRegister(PC, pc); switch (op) { case 1: // Single operand instructions { // Register dstRegister = instruction & 0xf; // Adress mode of destination... int ad = (instruction >> 4) & 3; int nxtCarry = 0; op = instruction & 0xff80; if (op == PUSH || op == CALL) { // The PUSH and CALL operations increase the SP before // address resolution! 
// store on stack - always move 2 steps (W) even if B./ sp = readRegister(SP) - 2; writeRegister(SP, sp); } if ((dstRegister == CG1 && ad > AM_INDEX) || dstRegister == CG2) { dstRegMode = true; cycles++; } else { switch(ad) { // Operand in register! case AM_REG: dstRegMode = true; cycles++; break; case AM_INDEX: // TODO: needs to handle if SR is used! dstAddress = readRegisterCG(dstRegister, ad) + memory[pc] + (memory[pc + 1] << 8); // When is PC incremented - assuming immediately after "read"? pc += 2; writeRegister(PC, pc); cycles += 4; break; // Indirect register case AM_IND_REG: dstAddress = readRegister(dstRegister); cycles += 3; break; // Bugfix suggested by Matt Thompson case AM_IND_AUTOINC: if(dstRegister == PC) { dstAddress = readRegister(PC); pc += 2; writeRegister(PC, pc); } else { dstAddress = readRegister(dstRegister); writeRegister(dstRegister, dstAddress + (word ? 2 : 1)); } cycles += 3; break; } } // Perform the read if (dstRegMode) { dst = readRegisterCG(dstRegister, ad); if (!word) { dst &= 0xff; } } else { dst = read(dstAddress, word); } switch(op) { case RRC: nxtCarry = (dst & 1) > 0 ? CARRY : 0; dst = dst >> 1; if (word) { dst |= (readRegister(SR) & CARRY) > 0 ? 0x8000 : 0; } else { dst |= (readRegister(SR) & CARRY) > 0 ? 0x80 : 0; } // Indicate write to memory!! write = true; // Set the next carry! writeRegister(SR, (readRegister(SR) & ~(CARRY | OVERFLOW)) | nxtCarry); break; case SWPB: int tmp = dst; dst = ((tmp >> 8) & 0xff) + ((tmp << 8) & 0xff00); write = true; break; case RRA: nxtCarry = (dst & 1) > 0 ? CARRY : 0; if (word) { dst = (dst & 0x8000) | (dst >> 1); } else { dst = (dst & 0x80) | (dst >> 1); } write = true; writeRegister(SR, (readRegister(SR) & ~(CARRY | OVERFLOW)) | nxtCarry); break; case SXT: // Extend Sign (bit 8-15 => same as bit 7) sr = readRegister(SR); dst = (dst & 0x80) > 0 ? dst | 0xff00 : dst & 0x7f; write = true; sr = sr & ~(CARRY | OVERFLOW); if (dst != 0) { sr |= CARRY; } writeRegister(SR, sr); break; case PUSH: if (word) { // Put lo & hi on stack! memory[sp] = dst & 0xff; memory[sp + 1] = dst >> 8; } else { // Byte => only lo byte memory[sp] = dst & 0xff; memory[sp + 1] = 0; } /* if REG or INDIRECT AUTOINC then add 2 cycles, otherwise 1 */ cycles += (ad == AM_REG || ad == AM_IND_AUTOINC) ? 2 : 1; write = false; updateStatus = false; break; case CALL: // store current PC on stack. (current PC points to next instr.) pc = readRegister(PC); memory[sp] = pc & 0xff; memory[sp + 1] = pc >> 8; writeRegister(PC, dst); /* Additional cycles: REG => 3, AM_IND_AUTO => 2, other => 1 */ cycles += (ad == AM_REG) ? 3 : (ad == AM_IND_AUTOINC) ? 2 : 1; /* profiler will be called during calls */ if (profiler != null) { MapEntry function = map.getEntry(dst); if (function == null) { function = getFunction(map, dst); } profiler.profileCall(function, cpuCycles); } write = false; updateStatus = false; break; case RETI: // Put Top of stack to Status DstRegister (TOS -> SR) sp = readRegister(SP); writeRegister(SR, memory[sp++] + (memory[sp++] << 8)); // TOS -> PC writeRegister(PC, memory[sp++] + (memory[sp++] << 8)); writeRegister(SP, sp); write = false; updateStatus = false; cycles += 4; if (debugInterrupts) { System.out.println("### RETI at " + pc + " => " + reg[PC] + " SP after: " + reg[SP]); } if (profiler != null) { profiler.profileRETI(cycles); } // This assumes that all interrupts will get back using RETI! 
handlePendingInterrupts(); break; default: System.out.println("Error: Not implemented instruction:" + instruction); } } break; // Jump instructions case 2: case 3: // 10 bits for address for these => 0x00fc => remove 2 bits int jmpOffset = instruction & 0x3ff; jmpOffset = (jmpOffset & 0x200) == 0 ? 2 * jmpOffset : -(2 * (0x200 - (jmpOffset & 0x1ff))); boolean jump = false; // All jump takes two cycles cycles += 2; sr = readRegister(SR); switch(instruction & 0xfc00) { case JNE: jump = (sr & ZERO) == 0; break; case JEQ: jump = (sr & ZERO) > 0; break; case JNC: jump = (sr & CARRY) == 0; break; case JC: jump = (sr & CARRY) > 0; break; case JN: jump = (sr & NEGATIVE) > 0; break; case JGE: jump = (sr & NEGATIVE) > 0 == (sr & OVERFLOW) > 0; break; case JL: jump = (sr & NEGATIVE) > 0 != (sr & OVERFLOW) > 0; break; case JMP: jump = true; break; default: System.out.println("Not implemented instruction: " + Utils.binary16(instruction)); } // Perform the Jump if (jump) { writeRegister(PC, pc + jmpOffset); } updateStatus = false; break; default: // Double operand instructions! dstRegister = instruction & 0xf; int srcRegister = (instruction >> 8) & 0xf; int as = (instruction >> 4) & 3; // AD: 0 => register direct, 1 => register index, e.g. X(Rn) dstRegMode = ((instruction >> 7) & 1) == 0; dstAddress = -1; int srcAddress = -1; int src = 0; // Some CGs should be handled as registry reads only... if ((srcRegister == CG1 && as > AM_INDEX) || srcRegister == CG2) { src = CREG_VALUES[srcRegister - 2][as]; if (!word) { src &= 0xff; } cycles += dstRegMode ? 1 : 4; } else { switch(as) { // Operand in register! case AM_REG: // CG handled above! src = readRegister(srcRegister); if (!word) { src &= 0xff; } cycles += dstRegMode ? 1 : 4; /* add cycle if destination register = PC */ if (dstRegister == PC) cycles++; break; case AM_INDEX: // Indexed if reg != PC & CG1/CG2 - will PC be incremented? srcAddress = readRegisterCG(srcRegister, as) + memory[pc] + (memory[pc + 1] << 8); // When is PC incremented - assuming immediately after "read"? incRegister(PC, 2); cycles += dstRegMode ? 3 : 6; break; // Indirect register case AM_IND_REG: srcAddress = readRegister(srcRegister); cycles += dstRegMode ? 2 : 5; break; case AM_IND_AUTOINC: if (srcRegister == PC) { /* PC is always handled as word */ srcAddress = readRegister(PC); pc += 2; incRegister(PC, 2); cycles += dstRegMode ? 2 : 5; } else { srcAddress = readRegister(srcRegister); incRegister(srcRegister, word ? 2 : 1); cycles += dstRegMode ? 2 : 5; } /* If destination register is PC another cycle is consumed */ if (dstRegister == PC) { cycles++; } break; } } // Perform the read of destination! if (dstRegMode) { if (op != MOV) { dst = readRegister(dstRegister); if (!word) { dst &= 0xff; } } } else { // PC Could have changed above! pc = readRegister(PC); if (dstRegister == 2) { /* absolute mode */ dstAddress = memory[pc] + (memory[pc + 1] << 8); } else { // CG here - probably not!??? dstAddress = readRegister(dstRegister) + memory[pc] + (memory[pc + 1] << 8); } if (op != MOV) dst = read(dstAddress, word); pc += 2; incRegister(PC, 2); } if (srcAddress != -1) { // Got very high address - check that?!! srcAddress = srcAddress & 0xffff; src = read(srcAddress, word); // if (debug) { // System.out.println("Reading from " + Utils.hex16(srcAddress) + // " => " + src); } int tmp = 0; int tmpAdd = 0; switch (op) { case MOV: // MOV dst = src; write = true; updateStatus = false; if (instruction == RETURN && profiler != null) { profiler.profileReturn(cpuCycles); } break; // FIX THIS!!! 
- make SUB a separate operation so that // it is clear that overflow flag is correct... case SUB: // Carry always 1 with SUB tmpAdd = 1; case SUBC: // Both sub and subc does one complement (not) + 1 (or carry) src = (src ^ 0xffff) & 0xffff; case ADDC: // ADDC if (op == ADDC || op == SUBC) tmpAdd = ((readRegister(SR) & CARRY) > 0) ? 1 : 0; case ADD: // ADD // Tmp gives zero if same sign! if sign is different after -> overf. sr = readRegister(SR); sr &= ~(OVERFLOW | CARRY); tmp = (src ^ dst) & (word ? 0x8000 : 0x80); // Includes carry if carry should be added... dst = dst + src + tmpAdd; if (dst > (word ? 0xffff : 0xff)) { sr |= CARRY; } // If tmp == 0 and currenly not the same sign for src & dst if (tmp == 0 && ((src ^ dst) & (word ? 0x8000 : 0x80)) != 0) { sr |= OVERFLOW; // System.out.println("OVERFLOW - ADD/SUB " + Utils.hex16(src) // + " + " + Utils.hex16(tmpDst)); } // System.out.println(Utils.hex16(dst) + " [SR=" + // Utils.hex16(reg[SR]) + "]"); writeRegister(SR, sr); write = true; break; case CMP: // CMP // Set CARRY if A >= B, and it's clear if A < B int b = word ? 0x8000 : 0x80; sr = readRegister(SR); sr = (sr & ~(CARRY | OVERFLOW)) | (dst >= src ? CARRY : 0); tmp = (dst - src); if (((src ^ tmp) & b) == 0 && (((src ^ dst) & b) != 0)) { sr |= OVERFLOW; } writeRegister(SR, sr); // Must set dst to the result to set the rest of the status register dst = tmp; break; case DADD: // DADD if (DEBUG) System.out.println("DADD: Decimal add executed - result error!!!"); // Decimal add... this is wrong... each nibble is 0-9... // So this has to be reimplemented... dst = dst + src + ((readRegister(SR) & CARRY) > 0 ? 1 : 0); write = true; break; case BIT: // BIT dst = src & dst; sr = readRegister(SR); // Clear overflow and carry! sr = sr & ~(CARRY | OVERFLOW); // Set carry if result is non-zero! if (dst != 0) { sr |= CARRY; } writeRegister(SR, sr); break; case BIC: // BIC // No status reg change // System.out.println("BIC: =>" + Utils.hex16(dstAddress) + " => " // + Utils.hex16(dst) + " AS: " + as + // " sReg: " + srcRegister + " => " + src + // " dReg: " + dstRegister + " => " + dst); dst = (~src) & dst; write = true; updateStatus = false; break; case BIS: // BIS dst = src | dst; write = true; updateStatus = false; break; case XOR: // XOR sr = readRegister(SR); sr = sr & ~(CARRY | OVERFLOW); if ((src & (word ? 0x8000 : 0x80)) != 0 && (dst & (word ? 0x8000 : 0x80)) != 0) { sr |= OVERFLOW; } dst = src ^ dst; if (dst != 0) { sr |= CARRY; } write = true; writeRegister(SR, sr); break; case AND: // AND sr = readRegister(SR); sr = sr & ~(CARRY | OVERFLOW); dst = src & dst; if (dst != 0) { sr |= CARRY; } write = true; writeRegister(SR, sr); break; default: System.out.println("DoubleOperand not implemented: " + op + " at " + pc); if (EXCEPTION_ON_BAD_OPERATION) { EmulationException ex = new EmulationException("Bad operation: " + op + " at " + pc); ex.initCause(new Throwable("" + pc)); throw ex; } } } if (word) { dst &= 0xffff; } else { dst &= 0xff; } if (write) { if (dstRegMode) { writeRegister(dstRegister, dst); } else { dstAddress &= 0xffff; write(dstAddress, dst, word); } } if (updateStatus) { // Update the Zero and Negative status! // Carry and overflow must be set separately! sr = readRegister(SR); sr = (sr & ~(ZERO | NEGATIVE)) | ((dst == 0) ? ZERO : 0) | (word ? ((dst & 0x8000) > 0 ? NEGATIVE : 0) : ((dst & 0x80) > 0 ? 
NEGATIVE : 0)); writeRegister(SR, sr); } //System.out.println("CYCLES AFTER: " + cycles); // Event processing (when CPU is awake) while (cycles >= nextEventCycles) { executeEvents(); } cpuCycles += cycles - startCycles; return true; } public String getName() { return "MSP430 Core"; } public int getModeMax() { return MODE_MAX; } MapEntry getFunction(MapTable map, int address) { MapEntry function = new MapEntry(MapEntry.TYPE.function, address, 0, "fkn at $" + Utils.hex16(address), null, true); map.setEntry(function); return function; } public int getPC() { return reg[PC]; } }
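// Standalone sketch (an assumption, not part of mspsim) isolating the little-endian word access
// convention used by MSP430Core.read()/write() above: a 16-bit word at address A is stored with
// the low byte at A and the high byte at A + 1, with addresses wrapped to the 64 kB space.
class Msp430WordAccessSketch {
    static int readWord(int[] memory, int address) {
        return (memory[address & 0xffff] & 0xff)
             | ((memory[(address + 1) & 0xffff] & 0xff) << 8);
    }

    static void writeWord(int[] memory, int address, int value) {
        memory[address & 0xffff] = value & 0xff;
        memory[(address + 1) & 0xffff] = (value >> 8) & 0xff;
    }
}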
package org.se.lab.service; import org.easymock.*; import org.junit.*; import org.junit.runner.RunWith; import org.se.lab.data.Community; import org.se.lab.data.CommunityDAO; import org.se.lab.data.Enumeration; import org.se.lab.data.User; import java.util.ArrayList; import java.util.List; import static org.easymock.EasyMock.*; import static org.hamcrest.CoreMatchers.is; @RunWith(EasyMockRunner.class) public class CommunityServiceTest { public static final int ID = 1; public static final String NAME = "name"; public static final String DESCRIPTION = "description"; public static final Enumeration APPROVE_STATE = new Enumeration(2); public static final Enumeration PENDING_STATE = new Enumeration(1); public static final Enumeration REFUSED_STATE = new Enumeration(3); @TestSubject private CommunityService communityService = new CommunityService(); @Rule public EasyMockRule mocks = new EasyMockRule(this); @Mock private CommunityDAO communityDAO; private Community community1; private Community community2; private Community community3; List<Community> communities; @Before public void setUp() throws Exception { community1 = new Community("name1", "description1"); community1.setState(APPROVE_STATE); community2 = new Community("name2", "description2"); community2.setState(PENDING_STATE); community3 = new Community("name3", "description3"); community3.setState(REFUSED_STATE); communities = new ArrayList<>(); } @Test public void approve() { Community community = new Community(NAME, DESCRIPTION); Community communityResult = new Community(NAME, DESCRIPTION); community.setState(APPROVE_STATE); Capture<Community> communityCapture = new Capture<Community>(); expect(communityDAO.update(capture(communityCapture))).andReturn(communityResult); replay(communityDAO); communityService.approve(community); Assert.assertThat(communityCapture.getValue().getState(), is(APPROVE_STATE)); } @Test public void request() { Community community = new Community(NAME, DESCRIPTION); Community communityResult = new Community(NAME, DESCRIPTION); community.setState(PENDING_STATE); Capture<Community> communityCapture = new Capture<Community>(); expect(communityDAO.insert(capture(communityCapture))).andReturn(communityResult); replay(communityDAO); communityService.request(community); Assert.assertThat(communityCapture.getValue().getState(), is(PENDING_STATE)); } @Test public void findAll_Successful(){ communities.add(community1); communities.add(community2); expect(communityDAO.findAll()).andReturn(communities); communityService.findAll(); } @Test public void getApproved_Successful(){ communities.add(community1); expect(communityDAO.findApprovedCommunities()).andReturn(communities); communityService.getApproved(); } @Test public void getPending_Successful(){ communities.add(community2); expect(communityDAO.findPendingCommunities()).andReturn(communities); communityService.getPending(); } @Test public void delete_Successful(){ communityDAO.delete(community1); expectLastCall(); communityService.delete(community1); } @Test public void update_Successful() { expect(communityDAO.update(community1)).andReturn(community1); communityService.update(community1); } @Test public void join_Successful(){ User user = new User("username2", "pwd"); communityService.join(community1,user); } @Test (expected = ServiceException.class) public void join_Fail(){ communityService.join(community1,null); } @Test public void findById_Successful(){ expect(communityDAO.findById(ID)).andReturn(community1); communityService.findById(ID); } @Test public void 
refuse_Successful(){ community3.setState(PENDING_STATE); expect(communityDAO.update(community3)).andReturn(community3); replay(communityDAO); communityService.refuse(community3); Assert.assertThat(community3.getState(), is(REFUSED_STATE)); } @Test (expected = ServiceException.class) public void refuse_Fail(){ communityService.refuse(community3); } }
package com.github.dandelion.core.storage; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.dandelion.core.DandelionException; /** * <p> * Storage for all bundles, based on a directed acyclic graph (dag). * * @author Thibault Duchateau * @since 0.10.0 */ public class BundleStorage { private static final Logger LOG = LoggerFactory.getLogger(BundleStorage.class); private BundleDag bundleDag; public BundleStorage() { this.bundleDag = new BundleDag(); } public BundleDag getBundleDag() { return bundleDag; } /** * <p> * Load all given bundle storage units into the {@link BundleDag}. * * @param bundleStorageUnits * All bundle storage units to load into the dag. * @return the {@link BundleDag} updated with the new * {@link BundleStorageUnit} and {@link AssetStorageUnit}. * @throws DandelionException * as soon as a cycle is introduced in the bundle DAG. */ public BundleDag storeBundles(List<BundleStorageUnit> bundleStorageUnits) { for (BundleStorageUnit bsu : bundleStorageUnits) { BundleStorageUnit bsuToAdd = bundleDag.addVertexIfNeeded(bsu); // DAG updating and dependencies handling if (bsu.getDependencies() != null && !bsu.getDependencies().isEmpty()) { for (String dependency : bsu.getDependencies()) { BundleStorageUnit to = bundleDag.addVertexIfNeeded(dependency); bundleDag.addEdge(bsuToAdd, to); } } else { bsuToAdd = bundleDag.addVertexIfNeeded(bsu); } // Asset updating // The bundle to add contains assets if (bsu.getAssetStorageUnits() != null) { // Let's see if each asset already exists in any bundle for (AssetStorageUnit asu : bsu.getAssetStorageUnits()) { boolean exists = false; for (BundleStorageUnit existingBundle : bundleDag.getVerticies()) { for (AssetStorageUnit existingAsu : existingBundle.getAssetStorageUnits()) { // If an asset with the same name and type already exists, overwrite it if (existingAsu.getName().equalsIgnoreCase(asu.getName()) && existingAsu.getType().equals(asu.getType())) { LOG.debug( "Replacing asset '{}' ({}) from the bundle '{}' by the asset {} ({}) from the bundle {}.", existingAsu.getName(), existingAsu.getVersion(), existingBundle.getName(), asu.getName(), asu.getVersion(), bsuToAdd.getName()); existingAsu.setVersion(asu.getVersion()); existingAsu.setLocations(asu.getLocations()); existingAsu.setDom(asu.getDom()); existingAsu.setType(asu.getType()); existingAsu.setAttributes(asu.getAttributes()); existingAsu.setAttributesOnlyName(asu.getAttributesOnlyName()); exists = true; break; } } if (exists) { break; } } // If the asset doesn't already exist, we just add it to the // current bundle if (!exists) { LOG.debug("Adding {} '{}' ({}) to the bundle '{}'", asu.getType(), asu.getName(), asu.getVersion(), bsuToAdd.getName()); bsuToAdd.getAssetStorageUnits().add(asu); } } } else { // TODO LOG.warn("No asset defined. Case still to be handled"); } } return bundleDag; } public void checkBundleDag() { for (BundleStorageUnit bsu : bundleDag.getVerticies()) { if (bsu.getAssetStorageUnits() == null || bsu.getAssetStorageUnits().isEmpty()) { LOG.warn("Empty bundle: {}", bsu.getName()); } } } /** * Returns the bundle storage units required for the given bundle, according to the topological * sort of the DAG. * * @param bundleName * The name of the bundle. * * @return The set of bundle storage units sorted in topological order. The set * also contains the unit of the given bundle, always last.
*/ public Set<BundleStorageUnit> bundlesFor(String bundleName) { BundleStorageUnit bsu = bundleDag.getVertex(bundleName); if (bsu != null) { Set<BundleStorageUnit> retval = null; if (bsu.isLeaf()) { retval = new HashSet<BundleStorageUnit>(1); retval.add(bsu); } else { retval = new LinkedHashSet<BundleStorageUnit>(TopologicalSorter.sort(bsu)); } return retval; } return Collections.emptySet(); } public Set<BundleStorageUnit> bundlesFor(String... bundleNames) { Set<BundleStorageUnit> retval = new LinkedHashSet<BundleStorageUnit>(); for (String bundleName : bundleNames) { retval.addAll(bundlesFor(bundleName.trim())); } return retval; } }
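// Hypothetical usage sketch (the helper class, the unit list and the bundle name are placeholders,
// and the class is assumed to sit in the same package as BundleStorage): load bundle storage units
// into the DAG, sanity-check it, then resolve one bundle together with its dependencies in
// topological order. Only the BundleStorage methods shown above are used.
class BundleStorageUsageSketch {
    static java.util.Set<BundleStorageUnit> resolve(java.util.List<BundleStorageUnit> units, String bundleName) {
        BundleStorage storage = new BundleStorage();
        storage.storeBundles(units);            // builds the DAG and merges duplicate assets
        storage.checkBundleDag();               // logs a warning for any empty bundle
        return storage.bundlesFor(bundleName);  // dependencies first, the requested bundle last
    }
}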
package T145.magistics.common; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.Item; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import T145.magistics.common.config.MagisticsConfig; import cpw.mods.fml.client.event.ConfigChangedEvent; import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.Mod; import cpw.mods.fml.common.Mod.EventHandler; import cpw.mods.fml.common.Mod.Instance; import cpw.mods.fml.common.SidedProxy; import cpw.mods.fml.common.event.FMLInitializationEvent; import cpw.mods.fml.common.event.FMLPostInitializationEvent; import cpw.mods.fml.common.event.FMLPreInitializationEvent; import cpw.mods.fml.common.eventhandler.SubscribeEvent; import cpw.mods.fml.common.network.NetworkRegistry; @Mod(modid = Magistics.modid, version = "0.6.0", guiFactory = "T145.magistics.client.gui.config.MagisticsConfigGuiFactory", dependencies = "after:Thaumcraft") public class Magistics { public static final String modid = "Magistics"; @Instance(modid) public static Magistics instance; @SidedProxy(clientSide = "T145.magistics.client.ClientProxy", serverSide = "T145.magistics.common.CommonProxy") public static CommonProxy proxy; public static Logger logger = LogManager.getLogger(modid); public static void log(String message) { if (MagisticsConfig.debug) logger.log(Level.INFO, message); } public static void error(String message, Exception error) { if (MagisticsConfig.debug) logger.log(Level.ERROR, message, error); } public static CreativeTabs tabMagistics = new CreativeTabs(Magistics.modid.toLowerCase()) { @Override public Item getTabIconItem() { return Item.getItemFromBlock(MagisticsConfig.blocks[0]); } }; @SubscribeEvent public void onConfigChanged(ConfigChangedEvent.OnConfigChangedEvent e) { if (e.modID.equals(modid)) MagisticsConfig.sync(); } @EventHandler public void preInit(FMLPreInitializationEvent e) { FMLCommonHandler.instance().bus().register(instance); MagisticsConfig.preInit(e.getSuggestedConfigurationFile()); } @EventHandler public void init(FMLInitializationEvent e) { MagisticsConfig.init(); proxy.registerRenderInformation(); NetworkRegistry.INSTANCE.registerGuiHandler(instance, proxy); } @EventHandler public void postInit(FMLPostInitializationEvent e) { MagisticsConfig.postInit(); } }
package br.com.dbsoft.util; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Enumeration; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; public class DBSUnzipFile { private ZipEntry wFileZipedObject; /** * Unzips the given zip file into the given target directory. * * @param pFileZiped * @param pDiretorio * @return the last extracted file */ public File unzipFile(ZipFile pFileZiped, String pDiretorio) { @SuppressWarnings("rawtypes") Enumeration xFileZipedElements; //"/home/jose_addario/"; try { xFileZipedElements = pFileZiped.entries(); while (xFileZipedElements.hasMoreElements()) { wFileZipedObject = (ZipEntry) xFileZipedElements.nextElement(); if (wFileZipedObject.isDirectory()) { System.err.println("Unzipping directory: " + wFileZipedObject.getName()); // create the directory for this entry (new File(pDiretorio + wFileZipedObject.getName())).mkdirs(); continue; } System.out.println("Unzipping file: " + wFileZipedObject.getName()); pvCopyInputStream(pFileZiped.getInputStream(wFileZipedObject), new BufferedOutputStream(new FileOutputStream( pDiretorio + wFileZipedObject.getName()))); } } catch (IOException ioexception) { System.err.println("Error while unzipping: " + ioexception.getMessage()); } return new File(pDiretorio + wFileZipedObject.getName()); } private static final void pvCopyInputStream(InputStream pInputStream, OutputStream pOutputStream) throws IOException { byte[] xBuffer = new byte[1024]; int xFileLenght; while ((xFileLenght = pInputStream.read(xBuffer)) >= 0) { pOutputStream.write(xBuffer, 0, xFileLenght); } pInputStream.close(); pOutputStream.close(); } }
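// Hypothetical usage sketch (the class name and paths are placeholders, and the class is assumed
// to sit in the same package as DBSUnzipFile): unzip an archive into an existing target directory.
// Note that unzipFile() concatenates the directory and entry names directly, so the directory
// argument should end with a file separator.
class DBSUnzipFileUsageSketch {
    public static void main(String[] args) throws java.io.IOException {
        try (java.util.zip.ZipFile zip = new java.util.zip.ZipFile("/tmp/example.zip")) {
            java.io.File lastExtracted = new DBSUnzipFile().unzipFile(zip, "/tmp/extracted/");
            System.out.println("Last extracted entry: " + lastExtracted);
        }
    }
}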
package ch.wijngaards; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.core.userdetails.User; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.provisioning.InMemoryUserDetailsManager; @Configuration @EnableWebSecurity public class WebSecurityConfig extends WebSecurityConfigurerAdapter { // expose Spring Actuator security role for re-use with Jolokia private static final Logger LOG = LoggerFactory.getLogger(WebSecurityConfig.class); @Value("${management.security.role}") private String managementSecurityRole; @Override protected void configure(HttpSecurity http) throws Exception { http // fix incompatibility with Hawt.io .csrf().disable() /* now configure auth - secure /jolokia and rest secured by @Secured annotation */
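// The configuration above is truncated here. A plausible continuation (an assumption, not the
// original code) would restrict /jolokia/** to the configured management role and fall back to
// HTTP Basic authentication, e.g.:
//
//   http.authorizeRequests()
//       .antMatchers("/jolokia/**").hasRole(managementSecurityRole)
//       .anyRequest().permitAll()
//       .and().httpBasic();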
package co.ulock.api; import java.security.Principal; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.ResponseEntity; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import co.ulock.api.dao.PasswordRepository; import co.ulock.api.data.Password; @CrossOrigin @RestController public class PasswordController { @Autowired private PasswordRepository dao; @RequestMapping(path = "/passwords") public List<Password> get(Principal principal) { return dao.findByAccountId(principal.getName()); } @RequestMapping(path = "/passwords/{passwordId}", method = RequestMethod.GET) public ResponseEntity<?> getById(Principal principal, @PathVariable String passwordId) { Password findOne = dao.findOne(passwordId); if (findOne != null && findOne.getAccountId().equals(principal.getName())) { return ResponseEntity.ok(findOne); } else { return ResponseEntity.notFound().build(); } } @Transactional @RequestMapping(path = "/passwords", method = RequestMethod.POST) public Password create(@RequestBody Password site, Principal principal) { site.setAccountId(principal.getName()); return dao.save(site); } @Transactional @RequestMapping(path = "/passwords/{passwordId}", method = RequestMethod.PUT) public ResponseEntity<?> update(@RequestBody Password site, @PathVariable String passwordId, Principal principal) { Password findOne = dao.findOne(passwordId); if (findOne != null && findOne.getAccountId().equals(principal.getName())) { findOne.setData(site.getData()); return ResponseEntity.ok(dao.save(findOne)); } else { return ResponseEntity.notFound().build(); } } @Transactional @RequestMapping(path = "/passwords/{passwordId}", method = RequestMethod.DELETE) public ResponseEntity<?> delete(@PathVariable String passwordId, Principal principal) { Password findOne = dao.findOne(passwordId); if (findOne != null && findOne.getAccountId().equals(principal.getName())) { dao.delete(findOne); return ResponseEntity.noContent().build(); } else { return ResponseEntity.notFound().build(); } } }
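// Hypothetical refactoring sketch (not part of the controller above): the ownership check that is
// repeated in getById(), update() and delete() could be centralized in a single helper like this.
// Only Password.getAccountId() and Principal.getName(), both used above, are relied on.
class PasswordOwnershipSketch {
    static boolean ownedBy(co.ulock.api.data.Password password, java.security.Principal principal) {
        return password != null && password.getAccountId().equals(principal.getName());
    }
}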
package com.boundary.sdk.event; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.BasicParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.Option; import org.apache.commons.cli.ParseException; public class EventCLI { private Options options; private Option apiHost; private Option apiKey; private Option command; private Option orgId; private Option optionCreatedAt; private Option optionFingerprintFields; private Option optionMessage; // private Option optionOrganizationId; private Option optionProperties; private Option optionReceivedAt; private Option optionSender; private Option optionSeverity; private Option optionSource; private Option optionStatus; private Option optionTags; private Option optionTitle; public EventCLI() { // create Options object options = new Options(); } @SuppressWarnings("static-access") private void handleCommandlandArguments(String[] args) { apiHost = OptionBuilder.withArgName("api_host").hasArg() .withDescription("Boundary API Host. defaults to api.boundary.com").create("b"); command = OptionBuilder.withArgName("command").hasArg() .withDescription("One of CREATE, UPDATE, OR DELETE").create("c"); apiKey = OptionBuilder.withArgName("api_key").hasArg() .withDescription("Boundary API Key").create("a"); apiKey.setLongOpt("api-key"); orgId = OptionBuilder.withArgName("org_id").hasArg() .withDescription("Boundary organization Id") .withLongOpt("org-id").create("o"); optionCreatedAt = OptionBuilder.withArgName("yyyy-mm-dd HH-MM-SS").hasArg() .withDescription("Date and time of event creation") .withLongOpt("created-at").create("z"); optionFingerprintFields = OptionBuilder.withArgName("field-name").hasArg() .withDescription("The fields of the event used to calculate the event fingerprint.") .withLongOpt("fingerprint-fields").create("f"); optionMessage = OptionBuilder.withArgName("message").hasArg() .withDescription("Additional description of the event") .withLongOpt("message").create("m"); optionProperties = OptionBuilder.withArgName("properties").hasArg() .withDescription("Properties for the event.") .withLongOpt("properties").create("p"); optionReceivedAt = OptionBuilder.withArgName("yyyy-mm-dd HH-MM-SS").hasArg() .withDescription("The timestamp the event was received.") .withLongOpt("received-at").create("r"); optionSender = OptionBuilder.withArgName("ref:type[:name]").hasArg() .withDescription("Optional information about the sender of the event.") .withLongOpt("sender").create("x"); optionSeverity = OptionBuilder.withArgName("severity").hasArg() .withDescription("Severity of the event which is one of INFO, WARN, ERROR, CRITICAL. Default is INFO.") .withLongOpt("severity").create("y"); optionSource = OptionBuilder.withArgName("ref:type[:name]").hasArg() .withDescription("The source of the event. 
The source is typically the hostname or IP address of the system this event refers to.") .withLongOpt("source").create("u"); optionStatus = OptionBuilder.withArgName("status").hasArg() .withDescription("One of OPEN, CLOSED, ACKNOWLEDGED, or OK.") .withLongOpt("status").create("w"); optionTags = OptionBuilder.withArgName("tag1[:tag2][:tag3][:...]").hasArg() .withDescription("Tags used to provide a classification for events.") .withLongOpt("tags").create("t"); optionTitle = OptionBuilder.withArgName("title").hasArg() .withDescription("Title of the event") .withLongOpt("title").create("n"); try { // add h option options.addOption("h", false, "Show help"); // add v option options.addOption(apiHost); options.addOption(apiKey); options.addOption(orgId); options.addOption(command); // Event options options.addOption(optionCreatedAt); options.addOption(optionFingerprintFields); options.addOption(optionMessage); // options.addOption(optionOrganizationId); options.addOption(optionProperties); options.addOption(optionReceivedAt); options.addOption(optionSender); options.addOption(optionSeverity); options.addOption(optionSource); options.addOption(optionStatus); options.addOption(optionTitle); options.addOption(optionTags); CommandLineParser parser = new BasicParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("h")) { // automatically generate the help statement HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("becli", options); System.exit(0); } } catch (ParseException e) { e.printStackTrace(); System.exit(1); } } public static void main(String[] args) { EventCLI event = new EventCLI(); event.handleCommandLineArguments(args); } }
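/*
 * Illustrative only: one way the EventCLI above could be driven. The API key, title and
 * fingerprint values are placeholders; the sketch simply builds an argument array and
 * hands it to main() instead of going through a shell. An equivalent (assumed) shell
 * invocation would be:
 *   becli -a <api-key> -b api.boundary.com -c CREATE -n "Disk usage high" -y WARN -f hostname
 */
package com.boundary.sdk.event;

public class EventCLIUsageSketch {
    public static void main(String[] args) {
        String[] cliArgs = {
                "-a", "<api-key>",        // Boundary API key (placeholder)
                "-b", "api.boundary.com", // API host, matching the documented default
                "-c", "CREATE",           // command: CREATE, UPDATE or DELETE
                "-n", "Disk usage high",  // event title
                "-y", "WARN",             // severity
                "-f", "hostname"          // fingerprint field(s)
        };
        EventCLI.main(cliArgs);
    }
}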
package com.codeborne.selenide; import com.codeborne.selenide.ex.DialogTextMismatch; import com.codeborne.selenide.ex.JavaScriptErrorsFound; import com.codeborne.selenide.impl.*; import org.openqa.selenium.*; import org.openqa.selenium.interactions.Actions; import org.openqa.selenium.logging.LogEntry; import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.ui.FluentWait; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import static com.codeborne.selenide.Configuration.dismissModalDialogs; import static com.codeborne.selenide.Configuration.timeout; import static com.codeborne.selenide.WebDriverRunner.*; import static com.codeborne.selenide.impl.WebElementProxy.wrap; import static java.lang.System.currentTimeMillis; import static java.util.Collections.emptyList; import static java.util.concurrent.TimeUnit.MILLISECONDS; /** * The main starting point of Selenide. * * You start with methods {@link #open(String)} for opening the tested application page and * {@link #$(String)} for searching web elements. */ public class Selenide { private static final Logger log = Logger.getLogger(Selenide.class.getName()); public static Navigator navigator = new Navigator(); public static void open(String relativeOrAbsoluteUrl) { navigator.open(relativeOrAbsoluteUrl); mockModalDialogs(); } /** * @see Selenide#open(String) */ public static void open(URL absoluteUrl) { navigator.open(absoluteUrl); mockModalDialogs(); } private static boolean doDismissModalDialogs() { return !supportsModalDialogs() || dismissModalDialogs; } private static void mockModalDialogs() { if (doDismissModalDialogs()) { String jsCode = " window._selenide_modalDialogReturnValue = true;\n" + " window.alert = function(message) {};\n" + " window.confirm = function(message) {\n" + " return window._selenide_modalDialogReturnValue;\n" + " };"; try { executeJavaScript(jsCode); } catch (UnsupportedOperationException cannotExecuteJsAgainstPlainTextPage) { log.warning(cannotExecuteJsAgainstPlainTextPage.toString()); } } } /** * Open a web page and create PageObject for it. * @return PageObject of given class */ public static <PageObjectClass> PageObjectClass open(String relativeOrAbsoluteUrl, Class<PageObjectClass> pageObjectClassClass) { open(relativeOrAbsoluteUrl); return page(pageObjectClassClass); } public static <PageObjectClass> PageObjectClass open(URL absoluteUrl, Class<PageObjectClass> pageObjectClassClass) { open(absoluteUrl); return page(pageObjectClassClass); } /** * Close the browser if it's open */ public static void close() { closeWebDriver(); } /** * Reload current page */ public static void refresh() { navigator.open(url()); } /** * Navigate browser back to previous page */ public static void back() { navigator.back(); } /** * Navigate browser forward to next page */ public static void forward() { navigator.forward(); } public static String title() { return getWebDriver().getTitle(); } /** * Not recommended. Test should not sleep, but should wait for some condition instead. 
* @param milliseconds Time to sleep in milliseconds */ public static void sleep(long milliseconds) { try { Thread.sleep(milliseconds); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } /** * Take a screenshot of the current page and save it to fileName.html and fileName.png * @param fileName Name of file (without extension) to save HTML and PNG to * @return The name of the resulting file */ public static String screenshot(String fileName) { return Screenshots.takeScreenShot(fileName); } /** * Wrap standard Selenium WebElement into SelenideElement * to use additional methods like shouldHave(), selectOption() etc. * * @param webElement standard Selenium WebElement * @return given WebElement wrapped into SelenideElement */ public static SelenideElement $(WebElement webElement) { return wrap(webElement); } /** * Find the first element matching given CSS selector * @param cssSelector any CSS selector like "input[name='first_name']" or "#messages .new_message" * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement $(String cssSelector) { return getElement(By.cssSelector(cssSelector)); } /** * Find the first element matching given CSS selector * @param seleniumSelector any Selenium selector like By.id(), By.name() etc. * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement $(By seleniumSelector) { return getElement(seleniumSelector); } /** * @see #getElement(By, int) */ public static SelenideElement $(By seleniumSelector, int index) { return getElement(seleniumSelector, index); } /** * Find the first element matching given CSS selector * @param parent the WebElement to search elements in * @param cssSelector any CSS selector like "input[name='first_name']" or "#messages .new_message" * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement $(WebElement parent, String cssSelector) { return WaitingSelenideElement.wrap($(parent), By.cssSelector(cssSelector), 0); } /** * Find the Nth element matching given criteria * @param cssSelector any CSS selector like "input[name='first_name']" or "#messages .new_message" * @param index 0..N * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement $(String cssSelector, int index) { return WaitingSelenideElement.wrap(null, By.cssSelector(cssSelector), index); } /** * Find the Nth element matching given criteria * @param parent the WebElement to search elements in * @param cssSelector any CSS selector like "input[name='first_name']" or "#messages .new_message" * @param index 0..N * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement $(WebElement parent, String cssSelector, int index) { return WaitingSelenideElement.wrap($(parent), By.cssSelector(cssSelector), index); } public static SelenideElement $(WebElement parent, By selector) { return WaitingSelenideElement.wrap($(parent), selector, 0); } public static SelenideElement $(WebElement parent, By selector, int index) { return WaitingSelenideElement.wrap($(parent), selector, index); } public static ElementsCollection $$(Collection<? extends WebElement> elements) { return new ElementsCollection(new WebElementsCollectionWrapper(elements)); } /** * Find all elements matching given CSS selector. 
* Method returns an ElementsCollection which is a list of WebElement objects that can be iterated, * and at the same time is an implementation of the WebElement interface, * meaning that you can call methods .sendKeys(), click() etc. on it. * * @param cssSelector any CSS selector like "input[name='first_name']" or "#messages .new_message" * @return empty list if element was not found */ public static ElementsCollection $$(String cssSelector) { return new ElementsCollection(new BySelectorCollection(By.cssSelector(cssSelector))); } /** * Find all elements matching given CSS selector. * Method returns an ElementsCollection which is a list of WebElement objects that can be iterated, * and at the same time is an implementation of the WebElement interface, * meaning that you can call methods .sendKeys(), click() etc. on it. * * @param seleniumSelector any Selenium selector like By.id(), By.name() etc. * @return empty list if element was not found */ public static ElementsCollection $$(By seleniumSelector) { return new ElementsCollection(new BySelectorCollection(seleniumSelector)); } /** * Find all elements matching given CSS selector inside given parent element * Method returns an ElementsCollection which is a list of WebElement objects that can be iterated, * and at the same time is an implementation of the WebElement interface, * meaning that you can call methods .sendKeys(), click() etc. on it. * * @param parent the WebElement to search elements in * @param cssSelector any CSS selector like "input[name='first_name']" or "#messages .new_message" * @return empty list if element was not found */ public static ElementsCollection $$(WebElement parent, String cssSelector) { return new ElementsCollection(new BySelectorCollection(parent, By.cssSelector(cssSelector))); } /** * Find all elements matching given criteria inside given parent element * @see Selenide#$$(WebElement, String) */ public static ElementsCollection $$(WebElement parent, By seleniumSelector) { return new ElementsCollection(new BySelectorCollection(parent, seleniumSelector)); } /** * Find the first element matching given criteria * @param criteria instance of By: By.id(), By.className() etc. * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement getElement(By criteria) { return WaitingSelenideElement.wrap(null, criteria, 0); } /** * Find the Nth element matching given criteria * @param criteria instance of By: By.id(), By.className() etc. * @param index 0..N * @return SelenideElement * @throws NoSuchElementException if element was not found */ public static SelenideElement getElement(By criteria, int index) { return WaitingSelenideElement.wrap(null, criteria, index); } /** * Find all elements matching given criteria * @param criteria instance of By: By.id(), By.className() etc. * @return empty list if element was not found */ public static ElementsCollection getElements(By criteria) { return $$(criteria); } @SuppressWarnings("unchecked") public static <T> T executeJavaScript(String jsCode, Object... arguments) { return (T) ((JavascriptExecutor) getWebDriver()).executeScript(jsCode, arguments); } /** * Not recommended. 
It's better to use method {@code $(radioField).selectRadio(value);} * * Select radio field by value * @param radioField any By selector for finding radio field * @param value value to select (should match an attribute "value") * @return the selected radio field */ public static SelenideElement selectRadio(By radioField, String value) { return $(radioField).selectRadio(value); } public static SelenideElement getSelectedRadio(By radioField) { for (WebElement radio : $$(radioField)) { if (radio.getAttribute("checked") != null) { return wrap(radio); } } return null; } public static void onConfirmReturn(boolean confirmReturnValue) { if (doDismissModalDialogs()) { executeJavaScript("window._selenide_modalDialogReturnValue = " + confirmReturnValue + ';'); } } /** * Accept (Click "Yes" or "Ok") in the confirmation dialog (javascript 'alert' or 'confirm'). * Method does nothing in case of HtmlUnit browser (since HtmlUnit does not support alerts). * * @param expectedDialogText if not null, check that confirmation dialog displays this message (case-sensitive) * @throws DialogTextMismatch if confirmation message differs from expected message */ public static void confirm(String expectedDialogText) { if (!doDismissModalDialogs()) { Alert alert = waitForAlert(); String actualDialogText = alert.getText(); alert.accept(); checkDialogText(expectedDialogText, actualDialogText); } } private static Alert waitForAlert() { final long startTime = currentTimeMillis(); NoAlertPresentException lastError; do { try { Alert alert = getWebDriver().switchTo().alert(); alert.getText(); // check that alert actually exists return alert; } catch (NoAlertPresentException e) { lastError = e; } } while (currentTimeMillis() - startTime <= timeout); throw lastError; } /** * Dismiss (click "No" or "Cancel") in the confirmation dialog (javascript 'alert' or 'confirm'). * Method does nothing in case of HtmlUnit browser (since HtmlUnit does not support alerts). * * @param expectedDialogText if not null, check that confirmation dialog displays this message (case-sensitive) * @throws DialogTextMismatch if confirmation message differs from expected message */ public static void dismiss(String expectedDialogText) { if (!doDismissModalDialogs()) { Alert alert = waitForAlert(); String actualDialogText = alert.getText(); alert.dismiss(); checkDialogText(expectedDialogText, actualDialogText); } } private static void checkDialogText(String expectedDialogText, String actualDialogText) { if (expectedDialogText != null && !expectedDialogText.equals(actualDialogText)) { Screenshots.takeScreenShot(Selenide.class.getName(), Thread.currentThread().getName()); throw new DialogTextMismatch(actualDialogText, expectedDialogText); } } public static SelenideTargetLocator switchTo() { return new SelenideTargetLocator(getWebDriver().switchTo()); } public static WebElement getFocusedElement() { return (WebElement) executeJavaScript("return document.activeElement"); } /** * Create a Page Object instance. * @see PageFactory#initElements(WebDriver, Class) */ public static <PageObjectClass> PageObjectClass page(Class<PageObjectClass> pageObjectClass) { try { return page(pageObjectClass.getConstructor().newInstance()); } catch (Exception e) { throw new RuntimeException("Failed to create new instance of " + pageObjectClass, e); } } /** * Create a Page Object instance. 
* @see PageFactory#initElements(WebDriver, Class) */ public static <PageObjectClass, T extends PageObjectClass> PageObjectClass page(T pageObject) { PageFactory.initElements(new SelenideFieldDecorator(getWebDriver()), pageObject); return pageObject; } public static FluentWait<WebDriver> Wait() { return new FluentWait<WebDriver>(getWebDriver()) .withTimeout(timeout, MILLISECONDS) .pollingEvery(Configuration.pollingInterval, MILLISECONDS); } public static Actions actions() { return new Actions(getWebDriver()); } /** * Switch to window/tab by title * @deprecated Same as switchTo().window(title) */ @Deprecated public static void switchToWindow(String title) { switchTo().window(title); } /** * @deprecated The same as {@code switchTo().window(index);} * @param index index of window (0-based) */ @Deprecated public static void switchToWindow(int index) { switchTo().window(index); } public static List<String> getJavascriptErrors() { if (!WebDriverRunner.webdriverContainer.hasWebDriverStarted()) { return emptyList(); } try { List<Object> errors = executeJavaScript("return window._selenide_jsErrors"); if (errors == null || errors.isEmpty()) { return emptyList(); } List<String> result = new ArrayList<String>(errors.size()); for (Object error : errors) { result.add(error.toString()); } return result; } catch (WebDriverException cannotExecuteJs) { log.severe(cannotExecuteJs.toString()); return emptyList(); } } /** * Check that there are no JS errors on the page * @throws JavaScriptErrorsFound */ public static void assertNoJavascriptErrors() throws JavaScriptErrorsFound { List<String> jsErrors = getJavascriptErrors(); if (jsErrors != null && !jsErrors.isEmpty()) { throw new JavaScriptErrorsFound(jsErrors); } } /** * Zoom current page (in or out). * @param factor e.g. 1.1 or 2.0 or 0.5 */ public static void zoom(double factor) { executeJavaScript( "document.body.style.transform = 'scale(' + arguments[0] + ')';" + "document.body.style.transformOrigin = '0 0';", factor ); } /** * Same as com.codeborne.selenide.Selenide#getWebDriverLogs(java.lang.String, java.util.logging.Level) * * EXPERIMENTAL! Use with caution. */ public static List<String> getWebDriverLogs(String logType) { return getWebDriverLogs(logType, Level.ALL); } public static List<String> getWebDriverLogs(String logType, Level logLevel) { return listToString(getLogEntries(logType, logLevel)); } private static List<LogEntry> getLogEntries(String logType, Level logLevel) { try { return getWebDriver().manage().logs().get(logType).filter(logLevel); } catch (UnsupportedOperationException ignore) { return emptyList(); } } private static <T> List<String> listToString(List<T> objects) { if (objects == null || objects.isEmpty()) { return emptyList(); } List<String> result = new ArrayList<String>(objects.size()); for (T object : objects) { result.add(object.toString()); } return result; } }
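/*
 * Illustrative only: a minimal sketch of the Selenide entry points defined above, assuming
 * a locally configured WebDriver/browser. The URL and the expected heading text are
 * placeholders; Condition.text comes from com.codeborne.selenide.Condition, as in standard
 * Selenide usage.
 */
package com.codeborne.selenide.examples;

import static com.codeborne.selenide.Condition.text;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.open;

public class SelenideUsageSketch {
    public static void main(String[] args) {
        // open() navigates the (lazily started) browser to the page under test
        open("https://example.org");

        // $() returns a lazy SelenideElement; shouldHave() waits up to Configuration.timeout
        $("h1").shouldHave(text("Example Domain"));
    }
}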
package com.extjs.selenium.tab; import com.extjs.selenium.ExtJsComponent; import com.sdl.selenium.WebLocatorUtils; import com.sdl.selenium.web.SearchType; import com.sdl.selenium.web.WebDriverConfig; import com.sdl.selenium.web.WebLocator; import com.sdl.selenium.web.utils.Utils; import org.apache.log4j.Logger; import org.openqa.selenium.By; import org.openqa.selenium.ElementNotVisibleException; import org.openqa.selenium.WebElement; import java.util.List; public class TabPanel extends ExtJsComponent { private static final Logger logger = Logger.getLogger(TabPanel.class); private TabPanel() { setClassName("TabPanel"); setBaseCls("x-tab-panel"); } public TabPanel(String text) { this(); setText(text, SearchType.EQUALS); } public TabPanel(WebLocator container, String text) { this(text); setContainer(container); } private String getTitlePath() { String returnPath = ""; if (hasText()) { /* body truncated in the source: the selector fragment built from getText() is missing here */ } return returnPath; } /** * this method returns the path of the main TabPanel (that contains also this Tab/Panel) * * @return the path of the main TabPanel */ private String getBaseTabPanelPath() { String selector = getBasePath(); if (hasText()) { /* body truncated in the source: the text-based part of the selector is missing here */ } return selector; } /** * this method returns the path of only one visible div from the main TabPanel * * @param disabled disabled * @return the path of only one visible div from the main TabPanel */ @Override public String getItemPath(boolean disabled) { String selector = getBaseTabPanelPath(); selector += "/*/*[contains(@class, 'x-tab-panel-body')]"; /* TODO (original): the remainder of this expression is truncated in the source */ return selector; } /** * After the tab is set to active, this waits 50ms to make sure the tab is rendered * * @return true or false */ public boolean setActive() { /* body truncated in the source: per the javadoc it activates the tab and waits ~50ms for rendering; returning false here is only a placeholder */ return false; } }
package com.fishercoder.solutions; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; /** * 1182. Shortest Distance to Target Color * * You are given an array colors, in which there are three colors: 1, 2 and 3. * You are also given some queries. Each query consists of two integers i and c, * return the shortest distance between the given index i and the target color c. If there is no solution return -1. * * Example 1: * Input: colors = [1,1,2,1,3,2,2,3,3], queries = [[1,3],[2,2],[6,1]] * Output: [3,0,3] * Explanation: * The nearest 3 from index 1 is at index 4 (3 steps away). * The nearest 2 from index 2 is at index 2 itself (0 steps away). * The nearest 1 from index 6 is at index 3 (3 steps away). * * Example 2: * Input: colors = [1,2], queries = [[0,3]] * Output: [-1] * Explanation: There is no 3 in the array. * * Constraints: * 1 <= colors.length <= 5*10^4 * 1 <= colors[i] <= 3 * 1 <= queries.length <= 5*10^4 * queries[i].length == 2 * 0 <= queries[i][0] < colors.length * 1 <= queries[i][1] <= 3 * */ public class _1182 { public static class Solution1 { public List<Integer> shortestDistanceColor(int[] colors, int[][] queries) { Map<Integer, List<Integer>> map = buildMap(colors); List<Integer> result = new ArrayList<>(); for (int[] query : queries) { result.add(binarySearch(query[0], map.get(query[1]))); } return result; } private Integer binarySearch(int index, List<Integer> indices) { if (indices == null) { return -1; } int left = 0; int right = indices.size() - 1; int minDistance = Integer.MAX_VALUE; while (left <= right) { int mid = left + (right - left) / 2; if (indices.get(mid) == index) { return 0; } else if (indices.get(mid) > index) { minDistance = Math.min(minDistance, indices.get(mid) - index); right = mid - 1; } else { minDistance = Math.min(minDistance, index - indices.get(mid)); left = mid + 1; } } return minDistance; } private Map<Integer, List<Integer>> buildMap(int[] colors) { Map<Integer, List<Integer>> map = new HashMap<>(); for (int i = 0; i < colors.length; i++) { if (!map.containsKey(colors[i])) { map.put(colors[i], new ArrayList<>()); } map.get(colors[i]).add(i); } return map; } } }
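/*
 * Illustrative only: a small driver for Solution1 above, re-running the two examples from
 * the problem statement in the class javadoc; the expected outputs in the comments come
 * from that statement.
 */
package com.fishercoder.solutions;

import java.util.List;

public class _1182UsageSketch {
    public static void main(String[] args) {
        _1182.Solution1 solution = new _1182.Solution1();

        // Example 1: expected [3, 0, 3]
        List<Integer> first = solution.shortestDistanceColor(
                new int[]{1, 1, 2, 1, 3, 2, 2, 3, 3},
                new int[][]{{1, 3}, {2, 2}, {6, 1}});
        System.out.println(first);

        // Example 2: color 3 never occurs, so the expected answer is [-1]
        List<Integer> second = solution.shortestDistanceColor(
                new int[]{1, 2},
                new int[][]{{0, 3}});
        System.out.println(second);
    }
}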
package com.fishercoder.solutions; public class _1283 { public static class Solution { public int smallestDivisor(int[] nums, int threshold) { int start = 1; int result = 0; int end = Integer.MAX_VALUE; while (start <= end) { int middle = start + (end - start) / 2; if (isSumLessThanThreshold(middle, nums, threshold)) { result = middle; end = middle - 1; } else { start = middle + 1; } } return result; } private boolean isSumLessThanThreshold(int middle, int[] nums, int threshold) { int sum = 0; for (int i = 0; i < nums.length; i++) { if (nums[i] % middle == 0) { sum += nums[i] / middle; } else { sum += nums[i] / middle + 1; } } return sum <= threshold; } } }
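/*
 * Illustrative only: a small driver for the binary-search-on-the-answer Solution above.
 * The input (nums = [1, 2, 5, 9], threshold = 6) is a standard example for this problem;
 * the ceiling-division sums worked out in the comments show why 5 is the smallest divisor
 * that keeps the sum within the threshold.
 */
package com.fishercoder.solutions;

public class _1283UsageSketch {
    public static void main(String[] args) {
        _1283.Solution solution = new _1283.Solution();

        // divisor 5: ceil(1/5) + ceil(2/5) + ceil(5/5) + ceil(9/5) = 1 + 1 + 1 + 2 = 5 <= 6
        // divisor 4: 1 + 1 + 2 + 3 = 7 > 6, so 5 is the smallest valid divisor
        System.out.println(solution.smallestDivisor(new int[]{1, 2, 5, 9}, 6)); // prints 5
    }
}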
package com.google.testing; import com.google.testing.TestSuiteProto.Property.Builder; import com.google.testing.TestSuiteProto.TestCase; import com.google.testing.TestSuiteProto.TestSuite; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import java.io.InputStream; import java.nio.charset.Charset; /** * STaX parser for the Ant (Junit task) XML test results format. * @author alexeagle@google.com (Alex Eagle) */ public class AntXmlParser { XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance(); public TestSuite parse(InputStream in, Charset encoding) { TestSuite.Builder builder = TestSuite.newBuilder(); try { XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(in, encoding.name()); while (xmlStreamReader.hasNext()) { int next = xmlStreamReader.next(); if (next == XMLStreamConstants.END_DOCUMENT) { break; } if (!xmlStreamReader.hasName()) { continue; } switch (xmlStreamReader.getName().toString()) { case "testsuite": parseSuite(xmlStreamReader, builder); break; default: } } } catch (XMLStreamException e) { throw new RuntimeException(e); } return builder.build(); } private void parseSuite(XMLStreamReader xmlStreamReader, TestSuite.Builder builder) throws XMLStreamException { for (int i = 0; i < xmlStreamReader.getAttributeCount(); i++) { String attributeValue = xmlStreamReader.getAttributeValue(i); switch (xmlStreamReader.getAttributeName(i).toString()) { case "name": builder.setName(attributeValue); break; case "tests": builder.setTotalCount(Integer.parseInt(attributeValue)); break; case "time": builder.setElapsedTimeMillis((long) (Float.parseFloat(attributeValue) * 1000)); break; case "errors": builder.setErrorCount(Integer.parseInt(attributeValue)); break; case "failures": builder.setFailureCount(Integer.parseInt(attributeValue)); break; case "skipped": builder.setSkippedCount(Integer.parseInt(attributeValue)); break; } } String tagName = null; do { xmlStreamReader.next(); if (!xmlStreamReader.hasName()) { continue; } tagName = xmlStreamReader.getName().toString(); if (xmlStreamReader.isStartElement()) { switch (tagName) { case "properties": parseProperties(xmlStreamReader, builder); break; case "testcase": parseTestCase(xmlStreamReader, builder); break; } } } while (!xmlStreamReader.isEndElement() || !"testsuite".equals(tagName)); } private void parseProperties(XMLStreamReader xmlStreamReader, TestSuite.Builder suiteBuilder) throws XMLStreamException { String tagName = null; do { xmlStreamReader.next(); if (!xmlStreamReader.hasName()) { continue; } tagName = xmlStreamReader.getName().toString(); if (xmlStreamReader.isStartElement()) { switch (tagName) { case "property": Builder builder = suiteBuilder.addPropertyBuilder(); for (int i = 0; i < xmlStreamReader.getAttributeCount(); i++) { String attributeValue = xmlStreamReader.getAttributeValue(i); switch (xmlStreamReader.getAttributeName(i).toString()) { case "name": builder.setName(attributeValue); break; case "value": builder.setValue(attributeValue); break; } } break; } } } while (!xmlStreamReader.isEndElement() || !"properties".equals(tagName)); } private void parseTestCase(XMLStreamReader xmlStreamReader, TestSuite.Builder suiteBuilder) { TestCase.Builder builder = suiteBuilder.addTestCaseBuilder(); for (int i = 0; i < xmlStreamReader.getAttributeCount(); i++) { String attributeValue = xmlStreamReader.getAttributeValue(i); switch (xmlStreamReader.getAttributeName(i).toString()) { case 
"name": builder.setName(attributeValue); break; case "classname": builder.setClassName(attributeValue); break; case "time": builder.setElapsedTimeMillis((long) (Float.parseFloat(attributeValue) * 1000)); break; } } } }
package org.drools.persistence.info; import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Lob; import javax.persistence.PrePersist; import javax.persistence.PreUpdate; import javax.persistence.Transient; import javax.persistence.Version; import org.drools.persistence.EntityInfo; import org.drools.persistence.SessionMarshallingHelper; @Entity public class SessionInfo implements EntityInfo { private @Id @GeneratedValue(strategy = GenerationType.AUTO) long id; @Version @Column(name = "OPTLOCK") private int version; private Date startDate; private Date lastModificationDate; @Lob private byte[] rulesByteArray; @Transient SessionMarshallingHelper helper; public SessionInfo() { this.startDate = new Date(); } public long getId() { return this.id; } public int getVersion() { return this.version; } public void setJPASessionMashallingHelper(SessionMarshallingHelper helper) { this.helper = helper; } public SessionMarshallingHelper getJPASessionMashallingHelper() { return helper; } public void setData( byte[] data) { this.rulesByteArray = data; } public byte[] getData() { return this.rulesByteArray; } public Date getStartDate() { return this.startDate; } public Date getLastModificationDate() { return this.lastModificationDate; } public void setLastModificationDate(Date date) { this.lastModificationDate = date; } @PrePersist @PreUpdate public void update() { this.rulesByteArray = this.helper.getSnapshot(); } }
package com.imcode.imcms.servlet; import com.imcode.imcms.mapping.DocumentMapper; import imcode.server.*; import imcode.server.document.DocumentDomainObject; import imcode.server.document.FileDocumentDomainObject; import imcode.server.document.HtmlDocumentDomainObject; import imcode.server.document.UrlDocumentDomainObject; import imcode.server.document.textdocument.TextDocumentDomainObject; import imcode.server.kerberos.KerberosLoginResult; import imcode.server.kerberos.KerberosLoginStatus; import imcode.server.user.UserDomainObject; import imcode.util.Utility; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.time.StopWatch; import org.apache.log4j.Logger; import org.apache.log4j.NDC; import org.apache.oro.text.perl.Perl5Util; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.*; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.net.SocketException; import java.util.Date; import java.util.HashMap; import java.util.Stack; /** * Retrieves document by metaId. */ public class GetDoc extends HttpServlet { public static final String REQUEST_PARAMETER__FILE_ID = "file_id"; private final static Logger TRACK_LOG = Logger.getLogger(ImcmsConstants.ACCESS_LOG); private final static Logger LOG = Logger.getLogger(GetDoc.class.getName()); private final static String NO_ACTIVE_DOCUMENT_URL = "no_active_document.html"; private static final String HTTP_HEADER_REFERRER = "Referer";// Note, intended misspelling of "Referrer", according to the HTTP spec. /** * Renders document. * <p/> * This method is called only from doGet and from AdminDoc.adminDoc if a user does not have rights to edit a document. * * @see com.imcode.imcms.servlet.admin.AdminDoc#adminDoc */ public static void viewDoc(String documentId, HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException { DocumentMapper documentMapper = Imcms.getServices().getDocumentMapper(); final String langCode = Imcms.getUser().getDocGetterCallback().getLanguage().getCode(); DocumentDomainObject document = documentMapper.getVersionedDocument(documentId, langCode, req); if (null == document) { res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } viewDoc(document, req, res); } /** * This method is called from viewDoc and from ImcmsSetupFilter.handleDocumentUrl only. 
* * @see ImcmsSetupFilter */ public static void viewDoc(DocumentDomainObject document, HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException { NDC.push("" + document.getId()); try { StopWatch stopWatch = new StopWatch(); stopWatch.start(); privateGetDoc(document, res, req); stopWatch.stop(); long renderTime = stopWatch.getTime(); LOG.trace("Rendering document " + document.getId() + " took " + renderTime + "ms."); } finally { NDC.pop(); } } private static void privateGetDoc(DocumentDomainObject document, HttpServletResponse res, HttpServletRequest req) throws IOException, ServletException { ImcmsServices imcref = Imcms.getServices(); HttpSession session = req.getSession(true); UserDomainObject user = Utility.getLoggedOnUser(req); DocumentMapper documentMapper = imcref.getDocumentMapper(); if (null == document) { res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } @SuppressWarnings("unchecked") Stack<Integer> history = (Stack<Integer>) req.getSession().getAttribute("history"); if (history == null) { history = new Stack<>(); req.getSession().setAttribute("history", history); } Integer docId = document.getId(); if (Utility.isTextDocument(document) && (history.empty() || !history.peek().equals(docId))) { history.push(docId); } String referrer = req.getHeader(HTTP_HEADER_REFERRER); DocumentDomainObject referringDocument = null; Perl5Util perlrx = new Perl5Util(); if (null != referrer && perlrx.match("/meta_id=(\\d+)/", referrer)) { int referring_meta_id = Integer.parseInt(perlrx.group(1)); referringDocument = documentMapper.getDocument(referring_meta_id); } DocumentRequest documentRequest = new DocumentRequest(imcref, user, document, referringDocument, req, res); documentRequest.setEmphasize(req.getParameterValues("emp")); Cookie[] cookies = req.getCookies(); HashMap cookieHash = new HashMap(); for (int i = 0; cookies != null && i < cookies.length; ++i) { Cookie currentCookie = cookies[i]; cookieHash.put(currentCookie.getName(), currentCookie.getValue()); } Revisits revisits = new Revisits(); if (cookieHash.get("imVisits") == null) { Date now = new Date(); long lNow = now.getTime(); String sNow = "" + lNow; Cookie resCookie = new Cookie("imVisits", session.getId() + sNow); resCookie.setMaxAge(31500000); resCookie.setPath("/"); res.addCookie(resCookie); revisits.setRevisitsId(session.getId()); revisits.setRevisitsDate(sNow); } else { revisits.setRevisitsId(cookieHash.get("imVisits").toString()); } documentRequest.setRevisits(revisits); if (!user.canAccess(document)) { if (imcref.getConfig().isSsoEnabled() && user.isDefaultUser()) { KerberosLoginResult loginResult = imcref.getKerberosLoginService().login(req, res); if (loginResult.getStatus() == KerberosLoginStatus.SUCCESS) { privateGetDoc(document, res, req); } return; } Utility.forwardToLogin(req, res); return; } if (!document.isPublished() && !user.canEdit(document)) { res.setStatus(HttpServletResponse.SC_FORBIDDEN); Utility.setDefaultHtmlContentType(res); imcref.getAdminTemplate(NO_ACTIVE_DOCUMENT_URL, user, null); return; } if (document instanceof UrlDocumentDomainObject) { String url_ref = ((UrlDocumentDomainObject) document).getUrl(); res.sendRedirect(url_ref); // Log to accesslog TRACK_LOG.info(documentRequest); return; } else if (document instanceof HtmlDocumentDomainObject) { Utility.setDefaultHtmlContentType(res); String htmlDocumentData = ((HtmlDocumentDomainObject) document).getHtml(); TRACK_LOG.info(documentRequest); res.getWriter().write(htmlDocumentData); } else if (document instanceof 
FileDocumentDomainObject) { String fileId = req.getParameter(REQUEST_PARAMETER__FILE_ID); FileDocumentDomainObject fileDocument = (FileDocumentDomainObject) document; FileDocumentDomainObject.FileDocumentFile file = fileDocument.getFileOrDefault(fileId); String filename = file.getFilename(); String mimetype = file.getMimeType(); InputStream fr; try { fr = new BufferedInputStream(file.getInputStreamSource().getInputStream()); } catch (IOException ex) { res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } // Workaround for #11619 - Android device refuses to download a file from build-in browser. // Might not help if user agent is changed manually and does not contain "android". String browserId = req.getHeader("User-Agent"); boolean attachment = req.getParameter("download") != null || (browserId != null && browserId.toLowerCase().contains("android")); int len = fr.available(); String content_disposition = (attachment ? "attachment" : "inline") + "; filename=\"" + filename + "\""; ServletOutputStream out = null; try { out = res.getOutputStream(); res.setContentLength(len); res.setContentType(mimetype); res.setHeader("Content-Disposition", content_disposition); try { IOUtils.copy(fr, out); } catch (SocketException ex) { LOG.debug("Exception occurred", ex); } } finally { IOUtils.closeQuietly(fr); IOUtils.closeQuietly(out); } // Log to accesslog TRACK_LOG.info(documentRequest); } else if (document instanceof TextDocumentDomainObject) { Utility.setDefaultHtmlContentType(res); req.getRequestDispatcher("/api/viewDoc/" + document.getId()).forward(req, res); } } public void doPost(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException { doGet(req, res); } public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { String documentId = req.getParameter("meta_id"); viewDoc(documentId, req, res); } }
package org.dspace.identifier; import org.apache.log4j.Logger; import org.dspace.app.util.NoidGenerator; import org.dspace.authorize.AuthorizeException; import org.dspace.content.*; import org.dspace.content.Collection; import org.dspace.core.ConfigurationManager; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.doi.CDLDataCiteService; import org.dspace.doi.DryadDOIRegistrationHelper; import org.dspace.doi.DOI; import org.dspace.doi.DOIFormatException; import org.dspace.doi.Minter; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.TableRow; import org.dspace.utils.DSpace; import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.VersioningService; import org.dspace.workflow.DryadWorkflowUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Required; import org.springframework.stereotype.Component; import java.io.*; import java.math.BigInteger; import java.net.MalformedURLException; import java.net.URL; import java.security.SecureRandom; import java.sql.SQLException; import java.util.*; @Component public class DOIIdentifierProvider extends IdentifierProvider implements org.springframework.beans.factory.InitializingBean { private static Logger log = Logger.getLogger(DOIIdentifierProvider.class); private static DCValue identifierMetadata = new DCValue(); private static final char DOT = '.'; private static final char SLASH = '/'; // Max number of files attached to a package; completely arbitrary private static final int MAX_NUM_OF_FILES = 150; private String myHdlPrefix; private String myHostname; private String myDataPkgColl; private String myDataFileColl; private String myLocalPartPrefix; private String myDoiPrefix; private int mySuffixVarLength; private final SecureRandom myRandom = new SecureRandom(); Minter perstMinter = null; private String[] supportedPrefixes = new String[]{"info:doi/", "doi:" , "http://dx.doi.org/"}; public void afterPropertiesSet() throws Exception { myHdlPrefix = configurationService.getProperty("handle.prefix"); myHostname = configurationService.getProperty("dryad.url"); myDataPkgColl = configurationService.getProperty("stats.datapkgs.coll"); myDataFileColl = configurationService.getProperty("stats.datafiles.coll"); if (configurationService.getPropertyAsType("doi.service.testmode", true)) { myDoiPrefix = configurationService.getProperty("doi.testprefix"); } else { myDoiPrefix = configurationService.getProperty("doi.prefix"); } myLocalPartPrefix = configurationService.getProperty("doi.localpart.suffix"); try{ mySuffixVarLength = Integer.parseInt(configurationService.getProperty("doi.suffix.length")); }catch (NumberFormatException nfe){ mySuffixVarLength=5; } identifierMetadata.schema = MetadataSchema.DC_SCHEMA; identifierMetadata.element = "identifier"; identifierMetadata.qualifier = null; } public boolean supports(String identifier) { for(String prefix : supportedPrefixes){ if(identifier.startsWith(prefix)) return true; } return false; } public String register(Context context, DSpaceObject dso) throws IdentifierException { try { if (dso instanceof Item && dso.getHandle() != null) { String doi = mintAndRegister(context, (Item) dso, true); ((Item) dso).clearMetadata(identifierMetadata.schema, identifierMetadata.element, identifierMetadata.qualifier, null); ((Item) dso).addMetadata(identifierMetadata.schema, identifierMetadata.element, identifierMetadata.qualifier, null, doi); } } 
catch (Exception e) { log.error(LogManager.getHeader(context, "Error while attempting to register doi", "Item id: " + dso.getID())); throw new IdentifierException("Error while registering doi identifier", e); } return null; } public String mint(Context context, DSpaceObject dso) throws IdentifierException { try { if (dso instanceof Item && dso.getHandle() != null) { String doi = mintAndRegister(context, (Item) dso, false); ((Item) dso).clearMetadata(identifierMetadata.schema, identifierMetadata.element, identifierMetadata.qualifier, null); ((Item) dso).addMetadata(identifierMetadata.schema, identifierMetadata.element, identifierMetadata.qualifier, null, doi); } } catch (Exception e) { log.error(LogManager.getHeader(context, "Error while attempting to mint doi", "Item id: " + dso.getID())); throw new IdentifierException("Error while retrieving doi identifier", e); } return null; } public void moveCanonical(Context context, DSpaceObject dso) throws IdentifierException { try{ Item item = (Item) dso; String doi = getDoiValue((Item) dso); DOI doi_ = new DOI(doi, item); String collection = getCollection(context, item); moveCanonical(item, true, collection, myDataPkgColl, doi_); }catch (Exception e) { log.error(LogManager.getHeader(context, "Error while attempting to moveCanonical doi", "Item id: " + dso.getID())); throw new IdentifierException("Error while moving doi identifier", e); } } public void delete(Context context, DSpaceObject dso) throws IdentifierException { try { if (dso instanceof Item) { Item item = (Item) dso; String doi = getDoiValue((Item) dso); // Remove from DOI service only if the item is not registered if(doi!=null){ if(!item.isArchived()){ remove(doi.toString()); } // if it is already registered it has to remain in DOI service and when someone looks for it go towards a "tombstone" page // reassign the URL of the DOI else{ DOI removedDOI = new DOI(doi.toString(), DOI.Type.TOMBSTONE); mint(removedDOI, true, null); } } // If it is the most current version occurs to move the canonical to the previous version VersionHistory history = retrieveVersionHistory(context, item); if(history!=null && history.getLatestVersion().getItem().equals(item) && history.size() > 1){ Item previous = history.getPrevious(history.getLatestVersion()).getItem(); DOI doi_ = new DOI(doi, previous); String collection = getCollection(context, previous); String myDataPkgColl = configurationService.getProperty("stats.datapkgs.coll"); moveCanonical(previous, true, collection, myDataPkgColl, doi_); } // IF Deleting a 1st version not archived yet: // The DOI stored in the previous should revert to the version without ".1". // Canonical DOI already point to the right item: no needs to move it if(history!=null && history.size() == 2 && !item.isArchived()){ revertDoisFirstItem(context, history); } } } catch (Exception e) { log.error(LogManager.getHeader(context, "Error while attempting to register doi", "Item id: " + dso.getID())); throw new IdentifierException("Error while moving doi identifier", e); } } private String mintAndRegister(Context context, Item item, boolean register) throws Exception { String doi = getDoiValue(item); String collection = getCollection(context, item); String myDataPkgColl = configurationService.getProperty("stats.datapkgs.coll"); VersionHistory history = retrieveVersionHistory(context, item); // CASE A: it is a versioned datafile and the user is modifying its content (adding or removing bitstream) upgrade version number. 
if(item.isArchived()){ if(!collection.equals(myDataPkgColl)){ if(lookup(doi)!=null){ log.debug("case A -- updating DOI info for versioned data file"); DOI doi_= upgradeDOIDataFile(context, doi, item, history); if(doi_!=null){ remove(doi); // Not checking for blackout here because item is already archived mint(doi_, register, createListMetadata(item)); item.clearMetadata(identifierMetadata.schema, identifierMetadata.element, identifierMetadata.qualifier, null); item.update(); if (doi == null || doi.equals("")) throw new Exception(); } } } } // CASE B: New Item or New version // FIRST time a VERSION is created 2 identifiers will be minted and the canonical will be updated to point to the newer URL: // - id.1-->old URL // - id.2-->new URL // - id(canonical)-- new URL // Next times 1 identifier will be minted and the canonical will be updated to point to the newer URL // - id.x-->new URL // - id(canonical)-- new URL // If it is a new ITEM just 1 identifier will be minted else{ DOI doi_ = calculateDOI(context, doi, item, history); log.info("DOI just minted: " + doi_); doi = doi_.toString(); if(DryadDOIRegistrationHelper.isDataPackageInPublicationBlackout(item)) { mint(doi_, "http://datadryad.org/publicationBlackout", register, createListMetadata(item)); } else { mint(doi_, register, createListMetadata(item)); } // CASE B1: Versioned DataPackage or DataFiles if (history != null) { log.debug("it's a new version; need to move the canonical identifier"); Version version = history.getVersion(item); // if it is the first time that is called "create version": mint identifier ".1" Version previous = history.getPrevious(version); if (history.isFirstVersion(previous)) { DOI firstDOI = calculateDOIFirstVersion(context, previous); if(DryadDOIRegistrationHelper.isDataPackageInPublicationBlackout(item)) { mint(firstDOI, "http://datadryad.org/publicationBlackout", register, createListMetadata(previous.getItem())); } else { mint(firstDOI, register, createListMetadata(previous.getItem())); } } } } return doi; } private void revertDoisFirstItem(Context context, VersionHistory history) throws SQLException, IOException, AuthorizeException{ Item previous = history.getPrevious(history.getLatestVersion()).getItem(); String collection = getCollection(context, previous); // remove doi from DOI service .1 String doiPrevious = getDoiValue(previous); DOI removedDOI = new DOI(doiPrevious.toString(), DOI.Type.TOMBSTONE); mint(removedDOI, true, null); if (collection.equals(myDataPkgColl)) { // replace doi metadata: dryad.2335.1 with dryad.2335 revertIdentierItem(previous); } else { // replace doi metadata: dryad.2335.1/1.1 with dryad.2335/1 revertIdentifierDF(previous); } } private void moveCanonical(Item item, boolean register, String collection, String myDataPkgColl, DOI doi_) throws IOException { // move the canonical DOI canonical = null; if (collection.equals(myDataPkgColl)) { canonical = getCanonicalDataPackage(doi_, item); } else { canonical = getCanonicalDataFile(doi_, item); } mint(canonical, register, createListMetadata(item)); } private void mint(DOI doi, boolean register, Map<String, String> metadata) throws IOException { mint(doi, null, register, metadata); } private void mint(DOI doi, String target, boolean register, Map<String, String> metadata) throws IOException { perstMinter.mintDOI(doi); if(register) { perstMinter.register(doi, target, metadata); } } private Map<String, String> createListMetadata(Item item){ Map<String, String> metadata = new HashMap<String, String>(); 
metadata.putAll(CDLDataCiteService.createMetadataList(item)); return metadata; } /** * Returns the doi value in the metadata (if present, else null will be returned) * * @param item the item to check for a doi * @return the doi string */ public static String getDoiValue(Item item) { DCValue[] doiVals = item.getMetadata(identifierMetadata.schema, identifierMetadata.element, identifierMetadata.qualifier, Item.ANY); if (doiVals != null && 0 < doiVals.length) { return doiVals[0].value; } return null; } public DSpaceObject resolve(Context context, String identifier, String... attributes) throws IdentifierNotFoundException, IdentifierNotResolvableException { // convert http DOIs to short form if (identifier.startsWith("http://dx.doi.org/")) { identifier = "doi:" + identifier.substring("http://dx.doi.org/".length()); } // correct http DOIs to short form if a slash was removed by the browser/server if (identifier.startsWith("http:/dx.doi.org/")) { identifier = "doi:" + identifier.substring("http:/dx.doi.org/".length()); } if (identifier != null && identifier.startsWith("doi:")) { DOI dbDOI = perstMinter.getKnownDOI(identifier); if(dbDOI==null) { throw new IdentifierNotFoundException(); } String value = dbDOI.getInternalIdentifier(); if (value != null) { // Ask Parent Service to retrieve internal reference to resource identified in the value. return parentService.resolve(context,value); } } return null; } public String lookup(String identifier) { String url=null; if (identifier != null && identifier.startsWith("doi:")) { DOI doi = perstMinter.getKnownDOI(identifier); if(doi!=null) url=doi.getTargetURL().toString(); } return url; } public String lookupByURL(String url) { if (url != null) { Set<DOI> dois = perstMinter.getKnownDOIByURL(url); if (dois == null || dois.size() == 0) throw new RuntimeException("Unknown DOI for URL: " + url); String result = ""; for (DOI d : dois) { result += d.toString() + " "; } return result; } return null; } public String lookupEzidRegistration(Item item) throws IOException { String aDOI = getDoiValue(item); return perstMinter.lookupDOIRegistration(aDOI); } public String getEzidRegistrationURL(Item item) { String aDoi = getDoiValue(item); return perstMinter.getRegistrationURL(aDoi); } public boolean remove(String identifier) { if (identifier != null && identifier.startsWith("doi:")) { DOI doi = perstMinter.getKnownDOI(identifier); return perstMinter.remove(doi); } return false; } /** * The field used for identification of DataPackage and DataFile in other areas of * codebase such as Workflow. * * @return */ public static DCValue getIdentifierMetadata() { return identifierMetadata; } /** * The PerstMinter delivered from Spring. * * @param perstMinter */ @Autowired @Required public void setPerstMinter(Minter perstMinter) { this.perstMinter = perstMinter; } // OLDER DryadDOIMinter Methods /** * Creates a DOI from the supplied DSpace URL string * * @param context * @param aDoi * @param item * @param vh * @return */ private DOI calculateDOI(Context context, String aDoi, Item item, VersionHistory vh) { URL itemURL; String url; DOI doi = null; doi = getDOI(aDoi, item); log.debug("calculateDOI() doi already exist? 
: " + (doi!=null)); // If our DOI doesn't exist, then we need to mint one if (doi == null) { try { context.turnOffAuthorisationSystem(); String collection = getCollection(context, item); log.debug("collection is " + collection); // DATAPACKAGE if (collection.equals(myDataPkgColl)) { doi = calculateDOIDataPackage(context, item, vh); } // DATAFILE else if (collection.equals(myDataFileColl)) { doi = calculateDOIDataFile(item, vh); } } catch (ClassCastException details) { throw new RuntimeException(details); } catch (SQLException details) { if (context != null) { context.abort(); } throw new RuntimeException(details); } catch (Exception details) { throw new RuntimeException(details); } } return doi; } private DOI calculateDOIFirstVersion(Context c, Version previous) throws SQLException { DOI doi; String idDoi = DOIIdentifierProvider.getDoiValue(previous.getItem()); doi = new DOI(idDoi, previous.getItem()); return doi; } private DOI getDOI(String aDoi, Item item) { DOI doi = null; if (aDoi == null) return null; doi = new DOI(aDoi, item); if (!exists(doi)) return null; return doi; } private synchronized DOI calculateDOIDataPackage(Context c, Item item, VersionHistory history) throws IOException, IdentifierException, AuthorizeException, SQLException { DOI doi, oldDoi = null; // Versioning: if it is a new version of an existing Item, the new DOI must be: oldDOI.(versionNumber), retrieve previous Item if (history != null) { Version version = history.getVersion(item); Version previous = history.getPrevious(version); String previousDOI = DOIIdentifierProvider.getDoiValue(previous.getItem()); // FIRST time a VERSION is created: update identifier of the previous item adding ".1" if (history.isFirstVersion(previous)) { previousDOI= updateIdentifierPreviousItem(previous.getItem()); } String canonical = previousDOI.substring(0, previousDOI.lastIndexOf(DOT)); String versionNumber = "" + DOT + (version.getVersionNumber()); doi = new DOI(canonical + versionNumber, item); } else { String var = NoidGenerator.buildVar(mySuffixVarLength); doi = new DOI(myDoiPrefix, myLocalPartPrefix + var, item); if (existsIdDOI(doi.toString())) return calculateDOIDataPackage(c, item, history); } return doi; } private boolean exists(DOI doi) { String dbDoiURL = lookup(doi.toString()); if (doi.getTargetURL().toString().equals(dbDoiURL)) return true; return false; } private DOI calculateDOIDataFile(Item item, VersionHistory history) throws IOException, IdentifierException, AuthorizeException, SQLException { String doiString; DCValue[] pkgLink = item.getMetadata("dc.relation.ispartof"); if (pkgLink == null) { throw new RuntimeException("Not linked to a data package"); } if (!(doiString = pkgLink[0].value).startsWith("doi:")) { throw new DOIFormatException("isPartOf value doesn't start with 'doi:'"); } log.warn("calculateDOIDataFile() - is part of: " + doiString); // Versioning: if it is a new version of an existing Item, the new DOI must be: oldDOI.(versionNumber) if (history != null) { // NEW VERSION OF AN EXISTING ITEM Version version = history.getVersion(item); Version previous = history.getPrevious(version); log.warn("calculateDOIDataFile() - new version of an existing - version: " + version.getVersionNumber()); log.warn("calculateDOIDataFile() - new version of an existing - previous: " + previous.getVersionNumber()); String idPrevious=null; // FIRST time a VERSION is created: update identifier of the previous item adding ".1" before / if (history.isFirstVersion(previous)) { log.warn("calculateDOIDataFile() - 
updateIdentifierPreviousDF()"); idPrevious= updateIdentifierPreviousDF(previous.getItem()); } else idPrevious = DOIIdentifierProvider.getDoiValue(previous.getItem()); // mint NEW DOI: taking first part from id dataPackage father (until the /) + taking last part from id previous dataFile (after the slash) e.g., 1111.3 / 1.1 log.warn("calculateDOIDataFile() - new version of an existing - idPrevious: " + idPrevious); String suffixDF = idPrevious.substring(idPrevious.lastIndexOf(SLASH) + 1); log.warn("calculateDOIDataFile() - new version of an existing - suffixDF: " + suffixDF); // the item has been modified? if yes: increment version number DOI childDOI=null; if(countBitstreams(previous.getItem())!= countBitstreams(item)){ log.warn("calculateDOIDataFile() - new version of an existing - dataFile modified"); int versionN = Integer.parseInt(suffixDF.substring(suffixDF.lastIndexOf(DOT)+1)); log.warn("calculateDOIDataFile() - new version of an existing - dataFile modified - doiString" + doiString); log.warn("calculateDOIDataFile() - new version of an existing - dataFile modified - suffixDF" + suffixDF); log.warn("calculateDOIDataFile() - new version of an existing - dataFile modified - versionN" + versionN); childDOI = new DOI(doiString + "/" + suffixDF.substring(0, suffixDF.lastIndexOf(DOT)) + DOT + (versionN+1), item); } else{ log.warn("calculateDOIDataFile() - new version of an existing - dataFile not modified - doiString" + doiString); log.warn("calculateDOIDataFile() - new version of an existing - dataFile not modified - suffixDF" + suffixDF); childDOI = new DOI(doiString + "/" + suffixDF, item); } log.warn("calculateDOIDataFile() - new version of an existing: " + childDOI); return childDOI; } else { // NEW ITEM: mint a new DOI // has an arbitrary max; in reality much, much less for (int index = 1; index < MAX_NUM_OF_FILES; index++) { // check if canonical already exists String idDOI = getCanonicalDataPackage(doiString) + "/" + index; if (existsIdDOI(idDOI)) { String dbDoiURL = lookup(idDOI); if (dbDoiURL.equals(DOI.getInternalForm(item))) { log.warn("calculateDOIDataFile() - new item canonical exists: " + (doiString + "/" + index)); return new DOI(doiString + "/" + index, item); } } else { log.warn("calculateDOIDataFile() - new item canonical not exists: " + (doiString + "/" + index)); DOI childDOI = new DOI(doiString + "/" + index, item); return childDOI; } } } return null; } /** * If a bitstream is added to or removed from the DataFile, we have to upgrade the version number * only if the item is already versioned and if it wasn't already upgraded. 
* @return */ private DOI upgradeDOIDataFile(Context c, String idDoi, Item item, VersionHistory history) throws SQLException, AuthorizeException { DOI doi=null; if (history != null) { // only if it is already versioned Version version = history.getVersion(item); if(history.isLastVersion(version)){ // only if the user is modifying the last version Version previous = history.getPrevious(version); String idPrevious = DOIIdentifierProvider.getDoiValue(previous.getItem()); String suffixIdPrevious=idPrevious.substring(idPrevious.lastIndexOf(SLASH)+1); String suffixIdDoi=idDoi.substring(idDoi.lastIndexOf(SLASH)+1); if(suffixIdPrevious.equals(suffixIdDoi)){ // only if it is not upgraded if(countBitstreams(previous.getItem())!= countBitstreams(item)){ // only if a bitstream was added or removed int versionN = Integer.parseInt(suffixIdPrevious.substring(suffixIdPrevious.lastIndexOf(DOT)+1)); String prefix=idDoi.substring(0, idDoi.lastIndexOf(DOT)); String newDoi=prefix + DOT + (versionN+1); doi = new DOI(newDoi, item); updateHasPartDataPackage(c, item, doi.toString(), idDoi); } } } } return doi; } private int countBitstreams(Item item) throws SQLException { int numberOfBitsream=0; for (Bundle b : item.getBundles()) { for (Bitstream bit : b.getBitstreams()) { numberOfBitsream++; } } return numberOfBitsream; } private String updateIdentifierPreviousItem(Item item) throws AuthorizeException, SQLException { DCValue[] doiVals = item.getMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); String id = doiVals[0].value; item.clearMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); id += DOT + "1"; item.addMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, null, id); item.update(); return id; } private String updateIdentifierPreviousDF(Item item) throws AuthorizeException, SQLException { DCValue[] doiVals = item.getMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); String id = doiVals[0].value; item.clearMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); String prefix = id.substring(0, id.lastIndexOf(SLASH)); String suffix = id.substring(id.lastIndexOf(SLASH)); id = prefix + DOT + "1" + suffix + DOT + "1"; item.addMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, null, id); item.update(); return id; } private String revertIdentierItem(Item item) throws AuthorizeException, SQLException { DCValue[] doiVals = item.getMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); String id = doiVals[0].value; item.clearMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); id = id.substring(0, id.lastIndexOf(DOT)); item.addMetadata(DOIIdentifierProvider.identifierMetadata.schema, 
DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, null, id); item.update(); return id; } private String revertIdentifierDF(Item item) throws AuthorizeException, SQLException { DCValue[] doiVals = item.getMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); String id = doiVals[0].value; item.clearMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, Item.ANY); String prefix = id.substring(0, id.lastIndexOf(SLASH)); String suffix = id.substring(id.lastIndexOf(SLASH)); prefix = prefix.substring(0, prefix.lastIndexOf(DOT)); suffix = suffix.substring(0, suffix.lastIndexOf(DOT)); id = prefix + suffix; item.addMetadata(DOIIdentifierProvider.identifierMetadata.schema, DOIIdentifierProvider.identifierMetadata.element, DOIIdentifierProvider.identifierMetadata.qualifier, null, id); item.update(); return id; } private void updateHasPartDataPackage(Context c, Item item, String idNew, String idOld) throws AuthorizeException, SQLException { Item dataPackage =org.dspace.workflow.DryadWorkflowUtils.getDataPackage(c, item); DCValue[] doiVals = dataPackage.getMetadata(DOIIdentifierProvider.identifierMetadata.schema, "relation", "haspart", Item.ANY); dataPackage.clearMetadata(DOIIdentifierProvider.identifierMetadata.schema, "relation", "haspart", null); for(DCValue value : doiVals){ if(!value.value.equals(idOld)) dataPackage.addMetadata(DOIIdentifierProvider.identifierMetadata.schema, "relation", "haspart", null, value.value); } dataPackage.addMetadata(DOIIdentifierProvider.identifierMetadata.schema, "relation", "haspart", null, idNew); dataPackage.update(); } private boolean existsIdDOI(String idDoi) { // This method is used to check if a newly generated DOI String collides // with an existing DOI. Since the DOIs are randomly-generated, // collisions are possible. 
String dbDoiId = lookup(idDoi.toString()); if (dbDoiId != null && !dbDoiId.equals("")) return true; return false; } private DOI getCanonicalDataPackage(DOI doi, Item item) { String canonicalID = doi.toString().substring(0, doi.toString().lastIndexOf(DOT)); DOI canonical = new DOI(canonicalID, item); return canonical; } private String getCanonicalDataPackage(String doi) { // no version present if(countDots(doi) <=2) return doi; String canonicalID = doi.toString().substring(0, doi.toString().lastIndexOf(DOT)); return canonicalID; } private short countDots(String doi){ short index=0; int indexDot=0; while( (indexDot=doi.indexOf(DOT))!=-1){ doi=doi.substring(indexDot+1); index++; } return index; } /** * input doi.toString()= doi:10.5061/dryad.9054.1/1.1 * output doi.toString()= 2rdfer334/1 */ private DOI getCanonicalDataFile(DOI doi, Item item) { log.warn("getCanonicalDataFile() doi in input: " + doi); // doi:10.5061/dryad.9054.1 (based on the input example) String idDP = doi.toString().substring(0, doi.toString().lastIndexOf(SLASH)); // idDF=1.1 String idDF = doi.toString().substring(doi.toString().lastIndexOf(SLASH) + 1); String canonicalDP = idDP.substring(0, idDP.lastIndexOf(DOT)); String canonicalDF = idDF; if(idDF.lastIndexOf(DOT)!=-1){ canonicalDF=idDF.substring(0, idDF.lastIndexOf(DOT)); } DOI canonical = new DOI(canonicalDP + SLASH + canonicalDF, item); return canonical; } private String getCollection(Context context, Item item) throws SQLException { String collectionResult = null; if(item.getOwningCollection()!=null) return item.getOwningCollection().getHandle(); // If our item is a workspaceitem it cannot have a collection, so we will need to get our collection from the workspace item return getCollectionFromWI(context, item.getID()).getHandle(); } private Collection getCollectionFromWI(Context context, int itemId) throws SQLException { TableRow row = DatabaseManager.querySingleTable(context, "workspaceitem", "SELECT collection_id FROM workspaceitem WHERE item_id= ?", itemId); if (row != null) return Collection.find(context, row.getIntColumn("collection_id")); row = DatabaseManager.querySingleTable(context, "workflowitem", "SELECT collection_id FROM workflowitem WHERE item_id= ?", itemId); if (row != null) return Collection.find(context, row.getIntColumn("collection_id")); throw new RuntimeException("Collection not found for item: " + itemId); } private URL getTarget(String aDSpaceURL) { URL target; try { target = new URL(aDSpaceURL); } catch (MalformedURLException details) { try { log.debug("Using " + myHostname + " for URL domain name"); // If we aren't given a full URL, create one with config value if (aDSpaceURL.startsWith("/")) { target = new URL(myHostname + aDSpaceURL); } else { target = new URL(myHostname + "/handle/" + aDSpaceURL); } } catch (MalformedURLException moreDetails) { throw new RuntimeException("Passed URL isn't a valid URL: " + aDSpaceURL); } } return target; } public String[] stripHandle(String aHDL) { int start = aHDL.lastIndexOf(myHdlPrefix + "/") + 1 + myHdlPrefix.length(); String id; if (start > myHdlPrefix.length()) { id = aHDL.substring(start, aHDL.length()); return new String[]{myHdlPrefix + "/" + id, id}; } else { return new String[]{myHdlPrefix + "/" + aHDL, aHDL}; } } private String buildVar() { String bigInt = new BigInteger(mySuffixVarLength * 5, myRandom).toString(32); StringBuilder buffer = new StringBuilder(bigInt); int charCount = 0; while (buffer.length() < mySuffixVarLength) { buffer.append('0'); } for (int index = 0; index < buffer.length(); 
index++) { char character = buffer.charAt(index); int random; if (character == 'a' | character == 'l' | character == 'e' | character == 'i' | character == 'o' | character == 'u') { random = myRandom.nextInt(9); buffer.setCharAt(index, String.valueOf(random).charAt(0)); charCount = 0; } else if (Character.isLetter(character)) { charCount += 1; if (charCount > 2) { random = myRandom.nextInt(9); buffer.setCharAt(index, String.valueOf(random).charAt(0)); charCount = 0; } } } return buffer.toString(); } private VersionHistory retrieveVersionHistory(Context c, Item item) { VersioningService versioningService = new DSpace().getSingletonService(VersioningService.class); VersionHistory history = versioningService.findVersionHistory(c, item.getID()); return history; } private Version getVersion(Context c, Item item) { VersioningService versioningService = new DSpace().getSingletonService(VersioningService.class); return versioningService.getVersion(c, item); } }
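// A minimal standalone sketch (not part of the original sources) of the DOI string handling
// used above: deriving a data-file suffix, bumping its version number, and computing the
// canonical form. It assumes DOT = "." and SLASH = "/", matching how those constants are used
// in the class above; the example value follows the javadoc of getCanonicalDataFile()
// ("doi:10.5061/dryad.9054.1/1.1").
public class DoiSuffixSketch {

    private static final String DOT = ".";
    private static final String SLASH = "/";

    public static void main(String[] args) {
        String doi = "doi:10.5061/dryad.9054.1/1.1";

        // Split into the data-package part and the data-file suffix.
        String idDP = doi.substring(0, doi.lastIndexOf(SLASH));   // doi:10.5061/dryad.9054.1
        String idDF = doi.substring(doi.lastIndexOf(SLASH) + 1);  // 1.1

        // Increment the data-file version, as done when a bitstream is added or removed.
        int versionN = Integer.parseInt(idDF.substring(idDF.lastIndexOf(DOT) + 1));
        String bumped = idDF.substring(0, idDF.lastIndexOf(DOT)) + DOT + (versionN + 1); // 1.2

        // Canonical form: strip the version component from both parts.
        String canonicalDP = idDP.substring(0, idDP.lastIndexOf(DOT));   // doi:10.5061/dryad.9054
        String canonicalDF = idDF.contains(DOT) ? idDF.substring(0, idDF.lastIndexOf(DOT)) : idDF; // 1
        String canonical = canonicalDP + SLASH + canonicalDF;

        System.out.println(idDP + " | " + idDF + " | " + bumped + " | " + canonical);
    }
}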
package com.github.aureliano.edocs.domain.dao; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; import org.junit.Before; import org.junit.Test; import com.github.aureliano.edocs.common.exception.ValidationException; import com.github.aureliano.edocs.common.message.ContextMessage; import com.github.aureliano.edocs.common.message.SeverityLevel; import com.github.aureliano.edocs.common.persistence.IDao; import com.github.aureliano.edocs.common.persistence.IPersistenceManager; import com.github.aureliano.edocs.common.persistence.PersistenceService; import com.github.aureliano.edocs.domain.entity.User; import com.github.aureliano.edocs.domain.helper.PersistenceHelper; public class UserDaoTest { private IDao<User> dao; public UserDaoTest() { PersistenceHelper.instance().prepareDatabase(); this.dao = new UserDao(); } @Before public void beforeTest() throws SQLException { PersistenceHelper.instance().deleteAllRecords();; } @Test public void testValidateSaveAction() { this.checkInvalidName(); this.checkInvalidPassword(); } @Test public void testSave() { User u = new User() .withName("agustine") .withPassword("test123") .withDbUser(null); User user = this.dao.save(u); assertNotNull(user.getId()); assertEquals(u.getName(), user.getName()); assertEquals(u.getPassword(), user.getPassword()); assertFalse(user.getDbUser()); u = new User() .withName("alfonse") .withPassword("test123") .withDbUser(true); user = this.dao.save(u); assertTrue(user.getDbUser()); } @Test public void testSaveErrorUniqueName() { User u = new User() .withName("agustine") .withPassword("test123") .withDbUser(null); this.dao.save(u); try { String sql = "insert into users(name, password) values('augustine', 'test123')"; PersistenceHelper.instance().executeUpdate(sql); } catch (SQLException ex) { assertEquals("23505", ex.getSQLState()); } } @Test public void testDeleteByEntity() throws SQLException { String name = "delete-by-entity"; User u = new User() .withName(name) .withPassword("test123"); User user = this.dao.save(u); ResultSet rs = PersistenceHelper.instance().executeQuery("select count(id) from users where name = '" + name + "'"); rs.next(); assertEquals(1, rs.getInt(1)); rs.close(); this.dao.delete(user); rs = PersistenceHelper.instance().executeQuery("select count(id) from users where name = '" + name + "'"); rs.next(); assertEquals(0, rs.getInt(1)); rs.close(); } @Test public void testDeleteById() throws SQLException { String name = "delete-by-id"; User u = new User() .withName(name) .withPassword("test123"); User user = this.dao.save(u); ResultSet rs = PersistenceHelper.instance().executeQuery("select count(id) from users where name = '" + name + "'"); rs.next(); assertEquals(1, rs.getInt(1)); rs.close(); this.dao.delete(user.getId()); rs = PersistenceHelper.instance().executeQuery("select count(id) from users where name = '" + name + "'"); rs.next(); assertEquals(0, rs.getInt(1)); rs.close(); } @Test public void testFind() { User u = new User() .withName("iacopo") .withPassword("test123"); User user1 = this.dao.save(u); User user2 = this.dao.find(user1.getId()); assertEquals(user1, user2); } @Test public void testSearchByEntity() { User u = new User() .withName("petrus") .withPassword("test123"); User user1 = this.dao.save(u); this.dao.save(new User() .withName("maria") .withPassword("test12345")); List<User> data = 
this.dao.search(user1); assertEquals(1, data.size()); User user2 = data.get(0); assertEquals(user1, user2); } @Test public void testSearchByQuery() { User u = new User() .withName("petrus") .withPassword("test123"); User user1 = this.dao.save(u); this.dao.save(new User() .withName("maria") .withPassword("test12345")); List<User> data = this.dao.search("select * from users where name = 'petrus'"); assertEquals(1, data.size()); User user2 = data.get(0); assertEquals(user1, user2); } private void checkInvalidName() { User u = new User().withPassword("test123"); this.validateContextMessage(u, "Expected to find a not empty text for field name."); u.withName("1"); this.validateContextMessage(u, "Expected field name to have size between 3 and 25 but got 1."); u.withName("1234567890123456789012345"); this.validateContextMessage(u, "Expected field name to have size between 3 and 25 but got 1."); } private void checkInvalidPassword() { User u = new User().withName("caesar-augustus"); this.validateContextMessage(u, "Expected to find a not empty text for field password."); u.withPassword("1"); this.validateContextMessage(u, "Expected field password to have size between 3 and 25 but got 1."); u.withPassword("1234567890123456789012345"); this.validateContextMessage(u, "Expected field password to have size between 3 and 25 but got 1."); } private void validateContextMessage(User user, String message) { IPersistenceManager pm = PersistenceService.instance().getPersistenceManager(); pm.clearContextMessages(); try { this.dao.save(user); } catch (ValidationException ex) { assertEquals(1, pm.getContextMessages().size()); ContextMessage m = pm.getContextMessages().iterator().next(); assertEquals(SeverityLevel.ERROR, m.getSeverityLevel()); assertEquals(message, m.getMessage()); } } }
package com.jgardella.app.backend;

import java.time.LocalDateTime;
import java.util.ArrayList;

public class Event implements Comparable<Event> {

    private String name;
    private String type;
    private LocalDateTime date;
    private ArrayList<Member> attendance;

    public Event(String name, String type, LocalDateTime date) {
        this.name = name;
        this.type = type;
        this.date = date;
        this.attendance = new ArrayList<Member>();
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public LocalDateTime getDate() {
        return date;
    }

    public void setDate(LocalDateTime date) {
        this.date = date;
    }

    public void addMemberToAttendance(Member member) {
        attendance.add(member);
    }

    @Override
    public int compareTo(Event event) {
        return date.compareTo(event.date);
    }
}
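// A short usage sketch (not from the original sources): Event implements Comparable by date,
// so a list of events can be sorted chronologically with Collections.sort. The event names and
// dates are illustrative only; the Member class from the same package is assumed to be on the
// classpath since Event references it.
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import com.jgardella.app.backend.Event;

public class EventSortSketch {
    public static void main(String[] args) {
        List<Event> events = new ArrayList<Event>();
        events.add(new Event("General Meeting", "meeting", LocalDateTime.of(2016, 3, 10, 18, 0)));
        events.add(new Event("Spring Tournament", "tournament", LocalDateTime.of(2016, 2, 1, 9, 30)));

        // compareTo delegates to LocalDateTime.compareTo, so this sorts oldest-first.
        Collections.sort(events);

        for (Event event : events) {
            System.out.println(event.getDate() + " " + event.getName());
        }
    }
}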
package com.maestrano.net; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.ProtocolException; import java.net.URL; import java.util.HashMap; import java.util.Map; public class MnoHttpClient { private String defaultUserAgent; public MnoHttpClient() { this.defaultUserAgent = "maestrano-java/" + System.getProperty("java.version"); } /** * Perform a GET request on the specified endpoint * @param url * @return response body * @throws IOException */ public String get(String url) throws IOException { return performRequest(url,"GET",null); } /** * Perform a GET request on the specified endpoint * @param url * @return response body * @throws IOException */ public String get(String url, Map<String,String> header) throws IOException { return performRequest(url,"GET",header); } /** * Perform a POST request on the specified endpoint * @param url * @param header * @param payload * @return response body * @throws IOException */ public String post(String url, Map<String,String> header, String payload) throws IOException { return performRequest(url,"POST",header,payload); } /** * Perform a PUT request on the specified endpoint * @param url * @param header * @param payload * @return response body * @throws IOException */ public String delete(String url, Map<String,String> header, String payload) throws IOException { return performRequest(url,"DELETE",header,payload); } /** * Perform a PUT request on the specified endpoint * @param url * @param header * @param payload * @return response body * @throws IOException */ public String put(String url, Map<String,String> header, String payload) throws IOException { return performRequest(url,"PUT",header,payload); } /** * Perform a request to the remote endpoint * @param url the remote endpoint to contact * @param method such as 'GET', 'PUT', 'POST' or 'DELETE' * @return response body * @throws IOException */ protected String performRequest(String url, String method) throws IOException { return performRequest(url,method,null); } /** * Perform a request to the remote endpoint * @param url the remote endpoint to contact * @param method such as 'GET', 'PUT', 'POST' or 'DELETE' * @param header values * @return response body * @throws IOException */ protected String performRequest(String url, String method, Map<String,String> header) throws IOException { return performRequest(url,method,header,null); } /** * Perform a request to the remote endpoint * @param url the remote endpoint to contact * @param method such as 'GET', 'PUT', 'POST' or 'DELETE' * @param header values * @param payload data to send * @return response body * @throws IOException */ protected String performRequest(String url, String method, Map<String,String> header, String payload) throws IOException { // Prepare header if (header == null) { header = new HashMap<String,String>(); } // Set method header.put("method",method.toUpperCase()); // Set user agent if (header.get("User-Agent") == null || header.get("User-Agent").isEmpty()) { header.put("User-Agent",defaultUserAgent); } // Get connection HttpURLConnection conn = openConnection(url,header); // Send Data if PUT/POST if (payload != null) { if (method.equalsIgnoreCase("PUT") || method.equalsIgnoreCase("POST")) { OutputStream output = null; try { output = conn.getOutputStream(); output.write(payload.getBytes()); } finally { if (output != null) { output.close(); } } } } // Parse response 
BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); String inputLine; StringBuffer html = new StringBuffer(); while ((inputLine = in.readLine()) != null) { html.append(inputLine); } in.close(); return html.toString(); } /** * Open a connection and follow the redirect * @param String url * @param header contain * @return HttpURLConnection connection * @throws IOException */ protected HttpURLConnection openConnection(String url, Map<String,String> header) throws IOException { // Initialize connection URL urlObj = null; HttpURLConnection conn = null; int redirectCount = 0; boolean redirect = true; while (redirect) { if (redirectCount > 10) { throw new ProtocolException("Too many redirects: " + redirectCount); } if (conn == null) { urlObj = new URL(url); } else { // get redirect url from "location" header field urlObj = new URL(conn.getHeaderField("Location")); } // open the new connection again conn = (HttpURLConnection) urlObj.openConnection(); conn.setRequestMethod(header.get("method")); conn.addRequestProperty("User-Agent", header.get("User-Agent")); conn.setInstanceFollowRedirects(true); // Check if redirect redirect = false; int status = conn.getResponseCode(); if (status != HttpURLConnection.HTTP_OK) { if (status == HttpURLConnection.HTTP_MOVED_TEMP || status == HttpURLConnection.HTTP_MOVED_PERM || status == HttpURLConnection.HTTP_SEE_OTHER) redirect = true; redirectCount ++; } } return conn; } }
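// A minimal usage sketch (not part of the original sources) for MnoHttpClient: a plain GET and
// a POST with an extra header and a JSON payload. The URL and header values are placeholders
// for illustration only.
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import com.maestrano.net.MnoHttpClient;

public class MnoHttpClientSketch {
    public static void main(String[] args) throws IOException {
        MnoHttpClient client = new MnoHttpClient();

        // Plain GET; the client sets its default User-Agent when none is supplied.
        String body = client.get("https://example.com/api/v1/ping");
        System.out.println(body);

        // POST with a custom header and a payload.
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Accept", "application/json");
        String response = client.post("https://example.com/api/v1/resources", headers, "{\"name\":\"demo\"}");
        System.out.println(response);
    }
}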
package com.qiniu.storage; import com.google.gson.JsonNull; import com.google.gson.JsonObject; import com.qiniu.common.Constants; import com.qiniu.common.QiniuException; import com.qiniu.http.Client; import com.qiniu.http.Response; import com.qiniu.storage.model.*; import com.qiniu.util.*; import java.util.*; public final class BucketManager { /** * Auth * QBoxAuth */ private final Auth auth; /** * Configuration * HTTP */ private Configuration configuration; /** * HTTP Client * HTTP */ private final Client client; /** * BucketManager * * @param auth Auth * @param cfg Configuration */ public BucketManager(Auth auth, Configuration cfg) { this.auth = auth; this.configuration = cfg.clone(); client = new Client(configuration); } public BucketManager(Auth auth, Client client) { this.auth = auth; this.client = client; this.configuration = new Configuration(); } public static String encodedEntry(String bucket, String key) { String encodedEntry; if (key != null) { encodedEntry = UrlSafeBase64.encodeToString(bucket + ":" + key); } else { encodedEntry = UrlSafeBase64.encodeToString(bucket); } return encodedEntry; } public static String encodedEntry(String bucket) { return encodedEntry(bucket, null); } /** * * * @return */ public String[] buckets() throws QiniuException { // bucket rs.qiniu.com or rs.qbox.me @ String url = String.format("%s/buckets", configuration.rsHost()); Response res = get(url); if (!res.isOK()) { throw new QiniuException(res); } String[] buckets = res.jsonToObject(String[].class); res.close(); return buckets; } public void createBucket(String bucketName, String region) throws QiniuException { String url = String.format("%s/mkbucketv2/%s/region/%s", configuration.rsHost(), UrlSafeBase64.encodeToString(bucketName), region); Response res = post(url, null); if (!res.isOK()) { throw new QiniuException(res); } res.close(); } public void deleteBucket(String bucketname) throws QiniuException { String url = String.format("%s/drop/%s", configuration.rsHost(), bucketname); Response res = post(url, null); if (!res.isOK()) { throw new QiniuException(res); } res.close(); } /** * domain * * @param bucket * @return domain * @throws QiniuException */ public String[] domainList(String bucket) throws QiniuException { String url = String.format("%s/v6/domain/list?tbl=%s", configuration.apiHost(), bucket); Response res = get(url); if (!res.isOK()) { throw new QiniuException(res); } String[] domains = res.jsonToObject(String[].class); res.close(); return domains; } /** * * * @param bucket * @param prefix * @return FileInfo */ public FileListIterator createFileListIterator(String bucket, String prefix) { return new FileListIterator(bucket, prefix, 1000, null); } /** * * * @param bucket * @param prefix * @param limit 1000 100 * @param delimiter * @return FileInfo */ public FileListIterator createFileListIterator(String bucket, String prefix, int limit, String delimiter) { return new FileListIterator(bucket, prefix, limit, delimiter); } private String listQuery(String bucket, String prefix, String marker, int limit, String delimiter) { StringMap map = new StringMap().put("bucket", bucket).putNotEmpty("marker", marker) .putNotEmpty("prefix", prefix).putNotEmpty("delimiter", delimiter).putWhen("limit", limit, limit > 0); return map.formString(); } /** * v1 response * * @param bucket * @param prefix * @param marker marker * @param limit 1000 100 * @param delimiter * @return * @throws QiniuException */ public Response listV1(String bucket, String prefix, String marker, int limit, String delimiter) throws 
QiniuException { String url = String.format("%s/list?%s", configuration.rsfHost(auth.accessKey, bucket), listQuery(bucket, prefix, marker, limit, delimiter)); return get(url); } public FileListing listFiles(String bucket, String prefix, String marker, int limit, String delimiter) throws QiniuException { Response response = listV1(bucket, prefix, marker, limit, delimiter); if (!response.isOK()) { throw new QiniuException(response); } FileListing fileListing = response.jsonToObject(FileListing.class); response.close(); return fileListing; } /** * v2 response v2 response body * string stream * * @param bucket * @param prefix * @param marker marker * @param limit 10000 * @param delimiter * @return Response okhttp response * @throws QiniuException */ public Response listV2(String bucket, String prefix, String marker, int limit, String delimiter) throws QiniuException { String url = String.format("%s/v2/list?%s", configuration.rsfHost(auth.accessKey, bucket), listQuery(bucket, prefix, marker, limit, delimiter)); return get(url); } public FileListing listFilesV2(String bucket, String prefix, String marker, int limit, String delimiter) throws QiniuException { Response response = listV2(bucket, prefix, marker, limit, delimiter); final String result = response.bodyString(); response.close(); List<String> lineList = Arrays.asList(result.split("\n")); FileListing fileListing = new FileListing(); List<FileInfo> fileInfoList = new ArrayList<>(); Set<String> commonPrefixSet = new HashSet<>(); for (int i = 0; i < lineList.size(); i++) { String line = lineList.get(i); JsonObject jsonObject = Json.decode(line, JsonObject.class); if (!(jsonObject.get("item") instanceof JsonNull)) fileInfoList.add(Json.decode(jsonObject.get("item"), FileInfo.class)); String dir = jsonObject.get("dir").getAsString(); if (!"".equals(dir)) commonPrefixSet.add(dir); if (i == lineList.size() - 1) fileListing.marker = jsonObject.get("marker").getAsString(); } fileListing.items = fileInfoList.toArray(new FileInfo[]{}); fileListing.commonPrefixes = commonPrefixSet.toArray(new String[]{}); return fileListing; } public FileInfo stat(String bucket, String fileKey) throws QiniuException { Response res = rsGet(bucket, String.format("/stat/%s", encodedEntry(bucket, fileKey))); if (!res.isOK()) { throw new QiniuException(res); } FileInfo fileInfo = res.jsonToObject(FileInfo.class); res.close(); return fileInfo; } public Response delete(String bucket, String key) throws QiniuException { return rsPost(bucket, String.format("/delete/%s", encodedEntry(bucket, key)), null); } public Response changeMime(String bucket, String key, String mime) throws QiniuException { String resource = encodedEntry(bucket, key); String encodedMime = UrlSafeBase64.encodeToString(mime); String path = String.format("/chgm/%s/mime/%s", resource, encodedMime); return rsPost(bucket, path, null); } public Response changeHeaders(String bucket, String key, Map<String, String> headers) throws QiniuException { String resource = encodedEntry(bucket, key); String path = String.format("/chgm/%s", resource); for (String k : headers.keySet()) { String encodedMetaValue = UrlSafeBase64.encodeToString(headers.get(k)); path = String.format("%s/x-qn-meta-!%s/%s", path, k, encodedMetaValue); } return rsPost(bucket, path, null); } /** * * * @param bucket * @param key * @param type type=0 type=1 * @throws QiniuException */ public Response changeType(String bucket, String key, StorageType type) throws QiniuException { String resource = encodedEntry(bucket, key); String path = 
String.format("/chtype/%s/type/%d", resource, type.ordinal()); return rsPost(bucket, path, null); } /** * * * @param bucket * @param key * @param status 01 * @throws QiniuException */ public Response changeStatus(String bucket, String key, int status) throws QiniuException { String resource = encodedEntry(bucket, key); String path = String.format("/chstatus/%s/status/%d", resource, status); return rsPost(bucket, path, null); } /** * forcetrue * * @param bucket * @param oldFileKey * @param newFileKey * @param force newFileKey * @throws QiniuException */ public Response rename(String bucket, String oldFileKey, String newFileKey, boolean force) throws QiniuException { return move(bucket, oldFileKey, bucket, newFileKey, force); } public Response rename(String bucket, String oldFileKey, String newFileKey) throws QiniuException { return move(bucket, oldFileKey, bucket, newFileKey); } /** * forcetrue * * @param fromBucket * @param fromFileKey * @param toBucket * @param toFileKey * @param force toFileKey * @throws QiniuException */ public Response copy(String fromBucket, String fromFileKey, String toBucket, String toFileKey, boolean force) throws QiniuException { String from = encodedEntry(fromBucket, fromFileKey); String to = encodedEntry(toBucket, toFileKey); String path = String.format("/copy/%s/%s/force/%s", from, to, force); return rsPost(fromBucket, path, null); } /** * * * @param fromBucket * @param fromFileKey * @param toBucket * @param toFileKey * @throws QiniuException */ public void copy(String fromBucket, String fromFileKey, String toBucket, String toFileKey) throws QiniuException { Response res = copy(fromBucket, fromFileKey, toBucket, toFileKey, false); if (!res.isOK()) { throw new QiniuException(res); } res.close(); } /** * * * @param fromBucket * @param fromFileKey * @param toBucket * @param toFileKey * @param force toFileKey * @throws QiniuException */ public Response move(String fromBucket, String fromFileKey, String toBucket, String toFileKey, boolean force) throws QiniuException { String from = encodedEntry(fromBucket, fromFileKey); String to = encodedEntry(toBucket, toFileKey); String path = String.format("/move/%s/%s/force/%s", from, to, force); return rsPost(fromBucket, path, null); } /** * , forcetrue * * @param fromBucket * @param fromFileKey * @param toBucket * @param toFileKey * @throws QiniuException */ public Response move(String fromBucket, String fromFileKey, String toBucket, String toFileKey) throws QiniuException { return move(fromBucket, fromFileKey, toBucket, toFileKey, false); } /** * * url * etag * * @param url * @param bucket * @throws QiniuException */ public FetchRet fetch(String url, String bucket) throws QiniuException { return fetch(url, bucket, null); } /** * * url * * @param url * @param bucket * @param key * @throws QiniuException */ public FetchRet fetch(String url, String bucket, String key) throws QiniuException { String resource = UrlSafeBase64.encodeToString(url); String to = encodedEntry(bucket, key); String path = String.format("/fetch/%s/to/%s", resource, to); Response res = ioPost(bucket, path); if (!res.isOK()) { throw new QiniuException(res); } FetchRet fetchRet = res.jsonToObject(FetchRet.class); res.close(); return fetchRet; } public Response asynFetch(String url, String bucket, String key) throws QiniuException { String requesturl = configuration.apiHost(auth.accessKey, bucket) + "/sisyphus/fetch"; StringMap stringMap = new StringMap().put("url", url).put("bucket", bucket).putNotNull("key", key); byte[] bodyByte = 
Json.encode(stringMap).getBytes(Constants.UTF_8); return client.post(requesturl, bodyByte, auth.authorizationV2(requesturl, "POST", bodyByte, "application/json"), Client.JsonMime); } public Response asynFetch(String url, String bucket, String key, String md5, String etag, String callbackurl, String callbackbody, String callbackbodytype, String callbackhost, String fileType) throws QiniuException { String requesturl = configuration.apiHost(auth.accessKey, bucket) + "/sisyphus/fetch"; StringMap stringMap = new StringMap().put("url", url).put("bucket", bucket). putNotNull("key", key).putNotNull("md5", md5).putNotNull("etag", etag). putNotNull("callbackurl", callbackurl).putNotNull("callbackbody", callbackbody). putNotNull("callbackbodytype", callbackbodytype). putNotNull("callbackhost", callbackhost).putNotNull("file_type", fileType); byte[] bodyByte = Json.encode(stringMap).getBytes(Constants.UTF_8); return client.post(requesturl, bodyByte, auth.authorizationV2(requesturl, "POST", bodyByte, "application/json"), Client.JsonMime); } /** * * * @param region bucket z0 z1 z2 na0 as0 * @param fetchWorkId id * @return Response * @throws QiniuException */ public Response checkAsynFetchid(String region, String fetchWorkId) throws QiniuException { String path = String.format("http://api-%s.qiniu.com/sisyphus/fetch?id=%s", region, fetchWorkId); return client.get(path, auth.authorization(path)); } /** * * * * @param bucket * @param key * @throws QiniuException */ public void prefetch(String bucket, String key) throws QiniuException { String resource = encodedEntry(bucket, key); String path = String.format("/prefetch/%s", resource); Response res = ioPost(bucket, path); if (!res.isOK()) { throw new QiniuException(res); } res.close(); } /** * * * @param bucket * @param srcSiteUrl */ public Response setImage(String bucket, String srcSiteUrl) throws QiniuException { return setImage(bucket, srcSiteUrl, null); } /** * * * @param bucket * @param srcSiteUrl * @param host Host */ public Response setImage(String bucket, String srcSiteUrl, String host) throws QiniuException { String encodedSiteUrl = UrlSafeBase64.encodeToString(srcSiteUrl); String encodedHost = null; if (host != null && host.length() > 0) { encodedHost = UrlSafeBase64.encodeToString(host); } String path = String.format("/image/%s/from/%s", bucket, encodedSiteUrl); if (encodedHost != null) { path += String.format("/host/%s", encodedHost); } return pubPost(path); } /** * * * @param bucket */ public Response unsetImage(String bucket) throws QiniuException { String path = String.format("/unimage/%s", bucket); return pubPost(path); } /** * * * @param bucket * @param key * @param days */ public Response deleteAfterDays(String bucket, String key, int days) throws QiniuException { return rsPost(bucket, String.format("/deleteAfterDays/%s/%d", encodedEntry(bucket, key), days), null); } public void setBucketAcl(String bucket, AclType acl) throws QiniuException { String url = String.format("%s/private?bucket=%s&private=%s", configuration.ucHost(), bucket, acl.getType()); Response res = post(url, null); if (!res.isOK()) { throw new QiniuException(res); } res.close(); } public BucketInfo getBucketInfo(String bucket) throws QiniuException { String url = String.format("%s/v2/bucketInfo?bucket=%s", configuration.ucHost(), bucket); Response res = post(url, null); if (!res.isOK()) { throw new QiniuException(res); } BucketInfo info = res.jsonToObject(BucketInfo.class); res.close(); return info; } public void setIndexPage(String bucket, IndexPageType type) throws 
QiniuException { String url = String.format("%s/noIndexPage?bucket=%s&noIndexPage=%s", configuration.ucHost(), bucket, type.getType()); Response res = post(url, null); if (!res.isOK()) { throw new QiniuException(res); } res.close(); } private Response rsPost(String bucket, String path, byte[] body) throws QiniuException { check(bucket); String url = configuration.rsHost(auth.accessKey, bucket) + path; return post(url, body); } private Response rsGet(String bucket, String path) throws QiniuException { check(bucket); String url = configuration.rsHost(auth.accessKey, bucket) + path; return get(url); } private Response ioPost(String bucket, String path) throws QiniuException { check(bucket); String url = configuration.ioHost(auth.accessKey, bucket) + path; return post(url, null); } private Response pubPost(String path) throws QiniuException { String url = "http://pu.qbox.me:10200" + path; return post(url, null); } private Response get(String url) throws QiniuException { StringMap headers = auth.authorization(url); return client.get(url, headers); } private Response post(String url, byte[] body) throws QiniuException { StringMap headers = auth.authorization(url, body, Client.FormMime); return client.post(url, body, headers, Client.FormMime); } private void check(String bucket) throws QiniuException { if (StringUtils.isNullOrEmpty(bucket)) { throw new QiniuException(Response.createError(null, null, 0, "")); } } public Response batch(BatchOperations operations) throws QiniuException { return rsPost(operations.execBucket(), "/batch", operations.toBody()); } public static class BatchOperations { private ArrayList<String> ops; private String execBucket = null; public BatchOperations() { this.ops = new ArrayList<String>(); } /** * chgm */ public BatchOperations addChgmOp(String bucket, String key, String newMimeType) { String resource = encodedEntry(bucket, key); String encodedMime = UrlSafeBase64.encodeToString(newMimeType); ops.add(String.format("/chgm/%s/mime/%s", resource, encodedMime)); setExecBucket(bucket); return this; } /** * copy */ public BatchOperations addCopyOp(String fromBucket, String fromFileKey, String toBucket, String toFileKey) { String from = encodedEntry(fromBucket, fromFileKey); String to = encodedEntry(toBucket, toFileKey); ops.add(String.format("copy/%s/%s", from, to)); setExecBucket(fromBucket); return this; } public BatchOperations addRenameOp(String fromBucket, String fromFileKey, String toFileKey) { return addMoveOp(fromBucket, fromFileKey, fromBucket, toFileKey); } /** * move */ public BatchOperations addMoveOp(String fromBucket, String fromKey, String toBucket, String toKey) { String from = encodedEntry(fromBucket, fromKey); String to = encodedEntry(toBucket, toKey); ops.add(String.format("move/%s/%s", from, to)); setExecBucket(fromBucket); return this; } /** * delete */ public BatchOperations addDeleteOp(String bucket, String... keys) { for (String key : keys) { ops.add(String.format("delete/%s", encodedEntry(bucket, key))); } setExecBucket(bucket); return this; } /** * stat */ public BatchOperations addStatOps(String bucket, String... keys) { for (String key : keys) { ops.add(String.format("stat/%s", encodedEntry(bucket, key))); } setExecBucket(bucket); return this; } /** * changeType */ public BatchOperations addChangeTypeOps(String bucket, StorageType type, String... 
keys) { for (String key : keys) { ops.add(String.format("chtype/%s/type/%d", encodedEntry(bucket, key), type.ordinal())); } setExecBucket(bucket); return this; } /** * changeStatus */ public BatchOperations addChangeStatusOps(String bucket, int status, String... keys) { for (String key : keys) { ops.add(String.format("chstatus/%s/status/%d", encodedEntry(bucket, key), status)); } setExecBucket(bucket); return this; } /** * deleteAfterDays */ public BatchOperations addDeleteAfterDaysOps(String bucket, int days, String... keys) { for (String key : keys) { ops.add(String.format("deleteAfterDays/%s/%d", encodedEntry(bucket, key), days)); } setExecBucket(bucket); return this; } public byte[] toBody() { String body = StringUtils.join(ops, "&op=", "op="); return StringUtils.utf8Bytes(body); } public BatchOperations merge(BatchOperations batch) { this.ops.addAll(batch.ops); setExecBucket(batch.execBucket()); return this; } public BatchOperations clearOps() { this.ops.clear(); return this; } private void setExecBucket(String bucket) { if (execBucket == null) { execBucket = bucket; } } public String execBucket() { return execBucket; } } public class FileListIterator implements Iterator<FileInfo[]> { private String marker = null; private String bucket; private String delimiter; private int limit; private String prefix; private QiniuException exception = null; public FileListIterator(String bucket, String prefix, int limit, String delimiter) { if (limit <= 0) { throw new IllegalArgumentException("limit must greater than 0"); } if (limit > 1000) { throw new IllegalArgumentException("limit must not greater than 1000"); } this.bucket = bucket; this.prefix = prefix; this.limit = limit; this.delimiter = delimiter; } public QiniuException error() { return exception; } @Override public boolean hasNext() { return exception == null && !"".equals(marker); } @Override public FileInfo[] next() { try { FileListing f = listFiles(bucket, prefix, marker, limit, delimiter); this.marker = f.marker == null ? "" : f.marker; return f.items; } catch (QiniuException e) { this.exception = e; return null; } } @Override public void remove() { throw new UnsupportedOperationException("remove"); } } }
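// A brief usage sketch (not from the original sources): list objects with the FileListIterator
// and batch-delete a few keys. The access key, secret key, bucket name and object keys are
// placeholders; Auth.create(...) is assumed to be the usual credential factory in this SDK.
import com.qiniu.common.QiniuException;
import com.qiniu.http.Response;
import com.qiniu.storage.BucketManager;
import com.qiniu.storage.Configuration;
import com.qiniu.storage.model.FileInfo;
import com.qiniu.util.Auth;

public class BucketManagerSketch {
    public static void main(String[] args) throws QiniuException {
        Auth auth = Auth.create("<ACCESS_KEY>", "<SECRET_KEY>");
        BucketManager bucketManager = new BucketManager(auth, new Configuration());

        // Iterate over all objects under a prefix, 1000 entries per request.
        BucketManager.FileListIterator it = bucketManager.createFileListIterator("my-bucket", "logs/");
        while (it.hasNext()) {
            FileInfo[] items = it.next();
            if (items == null) {
                break; // next() returns null when the iterator recorded an error (see error())
            }
            for (FileInfo info : items) {
                System.out.println(info.key);
            }
        }

        // Batch-delete several keys in one request.
        BucketManager.BatchOperations ops = new BucketManager.BatchOperations()
                .addDeleteOp("my-bucket", "logs/a.txt", "logs/b.txt");
        Response res = bucketManager.batch(ops);
        System.out.println(res.isOK());
    }
}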
package commandparser; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RestController public class CommandController { @RequestMapping(value = "/parse_command", method = { RequestMethod.GET, RequestMethod.POST }) public Object parseCommand(@RequestParam(value="command", defaultValue="") String command) { command = command.toLowerCase(); System.out.println(command); try { final int LIST = 0; final int SEARCH = 1; final String[] commands = { "list", "search" }; final Pattern[] patterns = { Pattern.compile("^lister?( (chansons?|musiques?)?)?$"), Pattern.compile("^rechercher?( (artiste|titre)?)?( (.*))?$") }; for (int i = 0; i < patterns.length; i++) { Matcher m = patterns[i].matcher(command); if (m.find()) { switch (i) { case LIST: return new Command(commands[LIST]); case SEARCH: return new Search((m.group(2) != null) ? m.group(2) : "everything", (m.group(4) != null) ? m.group(4) : ""); } } } } catch (Exception e) {} return new Error("Could not parse command"); } }
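// A small standalone sketch (not part of the original sources) showing what the two patterns in
// parseCommand() accept. It only exercises the regular expressions; the Command/Search/Error
// response classes are left out.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class CommandPatternSketch {
    public static void main(String[] args) {
        Pattern list = Pattern.compile("^lister?( (chansons?|musiques?)?)?$");
        Pattern search = Pattern.compile("^rechercher?( (artiste|titre)?)?( (.*))?$");

        // "lister musiques" matches the list pattern.
        System.out.println(list.matcher("lister musiques").find()); // true

        // For the search pattern, group(2) is the field and group(4) is the query text.
        Matcher m = search.matcher("rechercher artiste daft punk");
        if (m.find()) {
            System.out.println(m.group(2)); // artiste
            System.out.println(m.group(4)); // daft punk
        }
    }
}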
package crazypants.enderio.fluid; import java.lang.reflect.Field; import crazypants.enderio.EnderIO; import crazypants.enderio.Log; import crazypants.enderio.config.Config; import crazypants.enderio.machine.generator.zombie.PacketNutrientTank; import crazypants.enderio.network.PacketHandler; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.block.state.IBlockState; import net.minecraft.client.renderer.ItemMeshDefinition; import net.minecraft.client.renderer.block.model.ModelResourceLocation; import net.minecraft.client.renderer.block.statemap.StateMapperBase; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.ResourceLocation; import net.minecraftforge.client.event.TextureStitchEvent; import net.minecraftforge.client.model.ModelLoader; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fluids.Fluid; import net.minecraftforge.fluids.FluidRegistry; import net.minecraftforge.fluids.IFluidTank; import net.minecraftforge.fml.common.Loader; import net.minecraftforge.fml.common.event.FMLInterModComms; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; public class Fluids { public static final String NUTRIENT_DISTILLATION_NAME = "nutrient_distillation"; public static final String ENDER_DISTILLATION_NAME = "ender_distillation"; public static final String VAPOR_OF_LEVITY_NAME = "vapor_of_levity"; public static final String HOOTCH_NAME = "hootch"; public static final String ROCKET_FUEL_NAME = "rocket_fuel"; public static final String FIRE_WATER_NAME = "fire_water"; public static final String XP_JUICE_NAME = "xpjuice"; public static final String LIQUID_SUNSHINE_NAME = "liquid_sunshine"; public static final String CLOUD_SEED_NAME = "cloud_seed"; public static final String CLOUD_SEED_CONCENTRATED_NAME = "cloud_seed_concentrated"; public static Fluid fluidNutrientDistillation; public static BlockFluidEio blockNutrientDistillation; public static Fluid fluidHootch; public static BlockFluidEio blockHootch; public static Fluid fluidRocketFuel; public static BlockFluidEio blockRocketFuel; public static Fluid fluidFireWater; public static BlockFluidEio blockFireWater; public static Fluid fluidLiquidSunshine; public static Fluid fluidCloudSeed; public static Fluid fluidCloudSeedConcentrated; public static BlockFluidEio blockLiquidSunshine; public static BlockFluidEio blockCloudSeed; public static BlockFluidEio blockCloudSeedConcentrated; public static Fluid fluidEnderDistillation; public static BlockFluidEio blockEnderDistillation; public static Fluid fluidVaporOfLevity; public static BlockFluidEio blockVaporOfLevity; // Open block compatable liquid XP public static Fluid fluidXpJuice; public static ResourceLocation getStill(String fluidName) { return new ResourceLocation(EnderIO.DOMAIN, "blocks/" + fluidName + "_still"); } public static ResourceLocation getFlowing(String fluidName) { return new ResourceLocation(EnderIO.DOMAIN, "blocks/" + fluidName + "_flow"); } public static ResourceLocation getRaw(String fluidName) { return new ResourceLocation(EnderIO.DOMAIN, "blocks/" + fluidName); } public static String toCapactityString(IFluidTank tank) { if (tank == null) { return "0/0 " + MB(); } return tank.getFluidAmount() + "/" + tank.getCapacity() + " " + MB(); } public static String MB() { return EnderIO.lang.localize("fluid.millibucket.abr"); } public void registerFluids() { Fluid f = 
new Fluid(Fluids.NUTRIENT_DISTILLATION_NAME, getStill(Fluids.NUTRIENT_DISTILLATION_NAME), getFlowing(Fluids.NUTRIENT_DISTILLATION_NAME)) .setDensity(1500).setViscosity(3000); FluidRegistry.registerFluid(f); fluidNutrientDistillation = FluidRegistry.getFluid(f.getName()); blockNutrientDistillation = BlockFluidEio.create(fluidNutrientDistillation, Material.WATER, 0x5a5e00); PacketHandler.INSTANCE.registerMessage(PacketNutrientTank.class, PacketNutrientTank.class, PacketHandler.nextID(), Side.CLIENT); f = new Fluid(Fluids.ENDER_DISTILLATION_NAME, getStill(Fluids.ENDER_DISTILLATION_NAME), getFlowing(Fluids.ENDER_DISTILLATION_NAME)) .setDensity(200).setViscosity(1000).setTemperature(175); FluidRegistry.registerFluid(f); fluidEnderDistillation = FluidRegistry.getFluid(f.getName()); blockEnderDistillation = BlockFluidEio.create(fluidEnderDistillation, Material.WATER, 0x149535); f = new Fluid(Fluids.VAPOR_OF_LEVITY_NAME, getStill(Fluids.VAPOR_OF_LEVITY_NAME), getFlowing(Fluids.VAPOR_OF_LEVITY_NAME)) .setDensity(-10).setViscosity(100).setTemperature(5); FluidRegistry.registerFluid(f); fluidVaporOfLevity = FluidRegistry.getFluid(f.getName()); blockVaporOfLevity = BlockFluidEio.create(fluidVaporOfLevity, Material.WATER, 0x41716a); blockVaporOfLevity.setQuantaPerBlock(1); f = new Fluid(Fluids.HOOTCH_NAME, Fluids.getStill(Fluids.HOOTCH_NAME), Fluids.getFlowing(Fluids.HOOTCH_NAME)).setDensity(900).setViscosity(1000); FluidRegistry.registerFluid(f); fluidHootch = FluidRegistry.getFluid(f.getName()); blockHootch = BlockFluidEio.create(fluidHootch, Material.WATER, 0xffffff); FluidFuelRegister.instance.addFuel(f, Config.hootchPowerPerCycleRF, Config.hootchPowerTotalBurnTime); FMLInterModComms.sendMessage("Railcraft", "boiler-fuel-liquid", Fluids.HOOTCH_NAME + "@" + (Config.hootchPowerPerCycleRF / 10 * Config.hootchPowerTotalBurnTime)); f = new Fluid(Fluids.ROCKET_FUEL_NAME, Fluids.getStill(Fluids.ROCKET_FUEL_NAME), Fluids.getFlowing(Fluids.ROCKET_FUEL_NAME)).setDensity(900) .setViscosity(1000); FluidRegistry.registerFluid(f); fluidRocketFuel = FluidRegistry.getFluid(f.getName()); blockRocketFuel = BlockFluidEio.create(fluidRocketFuel, Material.WATER, 0x707044); FluidFuelRegister.instance.addFuel(f, Config.rocketFuelPowerPerCycleRF, Config.rocketFuelPowerTotalBurnTime); FMLInterModComms.sendMessage("Railcraft", "boiler-fuel-liquid", Fluids.ROCKET_FUEL_NAME + "@" + (Config.rocketFuelPowerPerCycleRF / 10 * Config.rocketFuelPowerTotalBurnTime)); f = new Fluid(Fluids.FIRE_WATER_NAME, Fluids.getStill(Fluids.FIRE_WATER_NAME), Fluids.getFlowing(Fluids.FIRE_WATER_NAME)).setDensity(900) .setViscosity(1000); FluidRegistry.registerFluid(f); fluidFireWater = FluidRegistry.getFluid(f.getName()); blockFireWater = BlockFluidEio.create(fluidFireWater, Material.LAVA, 0x8a490f); FluidFuelRegister.instance.addFuel(f, Config.fireWaterPowerPerCycleRF, Config.fireWaterPowerTotalBurnTime); FMLInterModComms.sendMessage("Railcraft", "boiler-fuel-liquid", Fluids.FIRE_WATER_NAME + "@" + (Config.fireWaterPowerPerCycleRF / 10 * Config.fireWaterPowerTotalBurnTime)); f = new Fluid(Fluids.LIQUID_SUNSHINE_NAME, getStill(LIQUID_SUNSHINE_NAME), getFlowing(LIQUID_SUNSHINE_NAME)).setDensity(200).setViscosity(400); FluidRegistry.registerFluid(f); fluidLiquidSunshine = FluidRegistry.getFluid(f.getName()); blockLiquidSunshine = BlockFluidEio.create(fluidLiquidSunshine, Material.WATER, 0xd2c561); blockLiquidSunshine.setLightLevel(1); f = new Fluid(Fluids.CLOUD_SEED_NAME, getStill(CLOUD_SEED_NAME), 
getFlowing(CLOUD_SEED_NAME)).setDensity(500).setViscosity(800); FluidRegistry.registerFluid(f); fluidCloudSeed = FluidRegistry.getFluid(f.getName()); blockCloudSeed = BlockFluidEio.create(fluidCloudSeed, Material.WATER, 0x248589); f = new Fluid(Fluids.CLOUD_SEED_CONCENTRATED_NAME, getStill(CLOUD_SEED_CONCENTRATED_NAME), getFlowing(CLOUD_SEED_CONCENTRATED_NAME)).setDensity(1000) .setViscosity(1200); FluidRegistry.registerFluid(f); fluidCloudSeedConcentrated = FluidRegistry.getFluid(f.getName()); blockCloudSeedConcentrated = BlockFluidEio.create(fluidCloudSeedConcentrated, Material.WATER, 0x3f5c5d); if (!Loader.isModLoaded("OpenBlocks")) { f = new Fluid(Config.xpJuiceName, Fluids.getRaw(Fluids.XP_JUICE_NAME + "still"), Fluids.getRaw(Fluids.XP_JUICE_NAME + "flowing")) .setLuminosity(10).setDensity(800).setViscosity(1500).setUnlocalizedName("eio.xpjuice"); if (FluidRegistry.registerFluid(f)) { Log.info("XP Juice registered by Ender IO."); fluidXpJuice = FluidRegistry.getFluid(f.getName()); } else { Log.info("XP Juice already registered by another mod as '" + FluidRegistry.getFluid(f.getName()).getUnlocalizedName() + "'"); fluidXpJuice = null; // will be set later } } else { Log.info("XP Juice registration left to Open Blocks."); } Buckets.createBuckets(); } public void forgeRegisterXPJuice() { fluidXpJuice = FluidRegistry.getFluid(getXPJuiceName()); if (fluidXpJuice == null) { Log.error("Liquid XP Juice registration left to open blocks but could not be found."); } } @SideOnly(Side.CLIENT) public void registerRenderers() { MinecraftForge.EVENT_BUS.register(this); registerFluidBlockRendering(fluidNutrientDistillation, NUTRIENT_DISTILLATION_NAME); registerFluidBlockRendering(fluidEnderDistillation, ENDER_DISTILLATION_NAME); registerFluidBlockRendering(fluidHootch, HOOTCH_NAME); registerFluidBlockRendering(fluidFireWater, FIRE_WATER_NAME); registerFluidBlockRendering(fluidRocketFuel, ROCKET_FUEL_NAME); registerFluidBlockRendering(fluidLiquidSunshine, LIQUID_SUNSHINE_NAME); registerFluidBlockRendering(fluidCloudSeed, CLOUD_SEED_NAME); registerFluidBlockRendering(fluidCloudSeedConcentrated, CLOUD_SEED_CONCENTRATED_NAME); registerFluidBlockRendering(fluidVaporOfLevity, VAPOR_OF_LEVITY_NAME); } @SideOnly(Side.CLIENT) public void registerFluidBlockRendering(Fluid fluid, String name) { FluidStateMapper mapper = new FluidStateMapper(fluid); Block block = fluid.getBlock(); Item item = Item.getItemFromBlock(block); // item-model if (item != null) { ModelLoader.registerItemVariants(item); ModelLoader.setCustomMeshDefinition(item, mapper); } // block-model if (block != null) { ModelLoader.setCustomStateMapper(block, mapper); } } @SideOnly(Side.CLIENT) @SubscribeEvent public void onIconLoad(TextureStitchEvent.Pre event) { if (fluidXpJuice != null) { event.getMap().registerSprite(fluidXpJuice.getStill()); event.getMap().registerSprite(fluidXpJuice.getFlowing()); } } private static String getXPJuiceName() { String openBlocksXPJuiceName = null; try { Field getField = Class.forName("openblocks.Config").getField("xpFluidId"); openBlocksXPJuiceName = (String) getField.get(null); } catch (Exception e) { } if (openBlocksXPJuiceName != null && !Config.xpJuiceName.equals(openBlocksXPJuiceName)) { Log.info("Overwriting XP Juice name with '" + openBlocksXPJuiceName + "' taken from OpenBlocks' config"); return openBlocksXPJuiceName; } return Config.xpJuiceName; } public static class FluidStateMapper extends StateMapperBase implements ItemMeshDefinition { public final Fluid fluid; public final ModelResourceLocation 
location; public FluidStateMapper(Fluid fluid) { this.fluid = fluid; location = new ModelResourceLocation(EnderIO.DOMAIN + ":fluids", fluid.getName()); } @Override protected ModelResourceLocation getModelResourceLocation(IBlockState state) { return location; } @Override public ModelResourceLocation getModelLocation(ItemStack stack) { return location; } } }
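// A hedged sketch (not part of the original sources): registering one further fluid with the
// same pattern used in registerFluids() above. The fluid name, density, viscosity and colour
// are made up for illustration; getStill()/getFlowing() and BlockFluidEio.create() are the
// helpers defined in this file.
import crazypants.enderio.fluid.BlockFluidEio;
import crazypants.enderio.fluid.Fluids;
import net.minecraft.block.material.Material;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidRegistry;

public class ExampleFluidRegistration {
    public static Fluid registerExampleCoolant() {
        Fluid f = new Fluid("example_coolant",
                Fluids.getStill("example_coolant"),
                Fluids.getFlowing("example_coolant"))
                .setDensity(1200).setViscosity(1500);
        FluidRegistry.registerFluid(f);
        // Re-read from the registry in case another mod claimed the name first,
        // then create the matching fluid block, mirroring the registrations above.
        Fluid registered = FluidRegistry.getFluid(f.getName());
        BlockFluidEio.create(registered, Material.WATER, 0x3366aa);
        return registered;
    }
}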
package de.dhbw.humbuch.util; import java.io.IOException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * Util class for looking up a book by its isbn * <ul> * <li>Standard ISBN API is isbndb.com</li> * <li>Document retrieval, validation and parsing can be overridden in subclass</li> * </ul> */ public class BookLookup { private final static String KEY = "CONBNUOZ"; private final static String LOOKUP_URL = "http://isbndb.com/api/v2/xml/" + KEY + "/book/"; /** * Look up a book by its ISBN * * @param isbn * {@link String} containing the ISBN - all non-numerical * characters are ignored * @return {@link Book} containing the book data * @throws BookNotFoundException * when a book is not found */ public static Book lookup(String isbn) throws BookNotFoundException { Document document = retrieveDocument(buildLookupURL(processISBN(isbn))); validateDocument(document); return parseDocument(document); } /** * Removes all non-numerical characters from the isbn * * @param isbn * {@link String} containing the ISBN * @return {@link String} without all non-numerical characters */ protected static String processISBN(String isbn) { return isbn.replaceAll("[^\\d]", ""); } /** * Builds the URI for the ISBN API * * @param isbn * {@link String} containing the ISBN * @return {@link String} containing the ISBN API URL */ protected static String buildLookupURL(String isbn) { return LOOKUP_URL + isbn; } /** * Retrieve an document from a given URI * * @param uri * {@link String} containing the URI * @return {@link Document} retrieved from the URI * @throws BookNotFoundException * thrown when an error occurs while retrieving the document */ protected static Document retrieveDocument(String uri) throws BookNotFoundException { try { DocumentBuilder documentBuilder = DocumentBuilderFactory .newInstance().newDocumentBuilder(); Document document = documentBuilder.parse(uri); document.getDocumentElement().normalize(); return document; } catch (ParserConfigurationException | SAXException | IOException e) { throw new BookNotFoundException( "Error during retrieving the XML document...", e); } } /** * Checks if the document contains valid data for a book * * @param document * {@link Document} to be validated * @throws BookNotFoundException * thrown when the document contains no valid book data */ protected static void validateDocument(Document document) throws BookNotFoundException { Object error = getNodeValue(document, "error"); if (error != null) { throw new BookNotFoundException("No book found..."); } } /** * Parse a document and extract the book data * * @param document * {@link Document} containing the book data * @return {@link Book} with the extracted data */ protected static Book parseDocument(Document document) { return new Book.Builder(getNodeValue(document, "title")) .author(getNodeValue(document, "name")) .isbn10(getNodeValue(document, "isbn10")) .isbn13(getNodeValue(document, "isbn13")) .publisher(getNodeValue(document, "publisher_name")).build(); } /** * Extract the value of the first node of an element * * @param document * the {@link Document} * @param elementName * name of the element of which the value should be extracted * @return {@link String} containing the content of the element if it * exists, otherwise <code>null</code> */ private static String 
getNodeValue(Document document, String elementName) { NodeList data = document.getElementsByTagName(elementName); Element element = (Element) data.item(0); if (element != null) { Node node = element.getChildNodes().item(0); return node.getNodeValue(); } return null; } /** * Exception indicating a book was not found */ public static class BookNotFoundException extends Exception { private static final long serialVersionUID = -644882332116172763L; public BookNotFoundException() { super(); } public BookNotFoundException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } public BookNotFoundException(String message, Throwable cause) { super(message, cause); } public BookNotFoundException(String message) { super(message); } public BookNotFoundException(Throwable cause) { super(cause); } } /** * POJO holding information about the book */ public static class Book { public final String author; public final String isbn10; public final String isbn13; public final String publisher; public final String title; private Book(Builder builder) { this.author = builder.author; this.isbn10 = builder.isbn10; this.isbn13 = builder.isbn13; this.publisher = builder.publisher; this.title = builder.title; } public static class Builder { private String author; private String isbn10; private String isbn13; private String publisher; private String title; public Builder(String title) { this.title = title; } public Builder author(String author) { this.author = author; return this; } public Builder isbn10(String isbn10) { this.isbn10 = isbn10; return this; } public Builder isbn13(String isbn13) { this.isbn13 = isbn13; return this; } public Builder publisher(String publisher) { this.publisher = publisher; return this; } public Book build() { return new Book(this); } } } }
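// A short usage sketch (not from the original sources): look up a book by ISBN and handle the
// case where the ISBN API returns no match. The ISBN value is only an example.
import de.dhbw.humbuch.util.BookLookup;
import de.dhbw.humbuch.util.BookLookup.Book;
import de.dhbw.humbuch.util.BookLookup.BookNotFoundException;

public class BookLookupSketch {
    public static void main(String[] args) {
        try {
            // Non-numerical characters (e.g. dashes) are stripped by processISBN().
            Book book = BookLookup.lookup("978-3-16-148410-0");
            System.out.println(book.title + " by " + book.author + " (" + book.publisher + ")");
        } catch (BookNotFoundException e) {
            System.err.println("No book found for that ISBN: " + e.getMessage());
        }
    }
}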
package de.htw_berlin.HoboOthello.KI;

import de.htw_berlin.HoboOthello.Core.*;

import java.util.ArrayList;
import java.util.List;

public class KI extends Player {

    private Color kiColor;
    private Level level;
    private Board board;
    private Field field;
    private GameRule gameRule;

    public KI(Color kiColor) {
        this.kiColor = kiColor;
    }

    /**
     * Method which will be used by the controller to let the KI pick a field
     *
     * @param board actual board, information for the KI to act upon
     * @return Field with coordinates where the KI wants to put down a stone
     */
    public Field setMove(Board board) {
        // remember the board handed in by the controller so the helper methods can act on it
        this.board = board;
        Field fieldToSetMove = null;
        if (level == Level.LEVEL1) {
            fieldToSetMove = pickRandomFieldFromList();
            return fieldToSetMove;
        }
        if (level == Level.LEVEL2) {
            fieldToSetMove = pickCornerOrSideFieldFromList();
            if (fieldToSetMove == null) {
                fieldToSetMove = pickRandomFieldFromList();
            }
            return fieldToSetMove;
        }
        if (level == Level.LEVEL3) {
            // LEVEL3 strategy is not implemented yet
            return fieldToSetMove;
        } else {
            throw new IllegalArgumentException("Level of KI is off...!");
        }
    }

    /**
     * Method which lists all possible moves for the KI
     *
     * @return listOfPossibleMoves
     */
    private List<Field> listPossibleMoves() {
        List<Field> listOfPossibleMoves = new ArrayList<Field>();
        for (Field field : board.iterateThroughAllFields()) {
            if (gameRule.isMoveAllowed(field, kiColor)) {
                listOfPossibleMoves.add(field);
            }
        }
        return listOfPossibleMoves;
    }

    /**
     * Method which picks a random move from the list of all possible moves for the KI
     *
     * @return field which is randomly chosen by this method
     */
    private Field pickRandomFieldFromList() {
        Field fieldToSet;
        List<Field> listOfPossibleMoves = listPossibleMoves();
        int randomNumber = (int) (Math.random() * listOfPossibleMoves.size()); // picks random index of field in list
        fieldToSet = listOfPossibleMoves.get(randomNumber);
        return fieldToSet;
    }

    /**
     * Method which picks the first possible corner field or the first possible side field for a turn
     * If there are no corner or side fields possible for this turn, this method returns null
     *
     * @return cornerOrSideField a possible corner or side field, null field if both are impossible
     */
    private Field pickCornerOrSideFieldFromList() {
        List<Field> listOfPossibleMoves = listPossibleMoves();
        Field cornerOrSideField = null;
        // prefer the first reachable corner field
        for (Field possibleField : listOfPossibleMoves) {
            if (board.isCornerField(possibleField)) {
                cornerOrSideField = possibleField;
                break;
            }
        }
        // otherwise fall back to the first reachable side field
        if (cornerOrSideField == null) {
            for (Field possibleField : listOfPossibleMoves) {
                if (board.isSideField(possibleField)) {
                    cornerOrSideField = possibleField;
                    break;
                }
            }
        }
        return cornerOrSideField;
    }
}
package de.rennspur.backend;

import java.util.List;

import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import de.rennspur.model.Club;
import de.rennspur.model.Event;
import de.rennspur.model.Race;
import de.rennspur.model.Team;
import de.rennspur.model.TeamMember;
import de.rennspur.model.TeamPosition;

/**
 * This Api part provides the Api endpoint for the webfrontend.
 *
 * @author leo.winter, leon.schlender
 * @param <FrontendData>
 */
@Path("/APId")
public class ApiFrontend<FrontendData> {

    @Inject
    private EntityManagerFactory emf;

    @GET
    @Path("/full")
    @Produces(MediaType.APPLICATION_JSON)
    public FrontendData getFrontendDataInJSON() {
        // TODO - Output of every needed value.
        Club club = new Club();
        Event event = new Event();
        Race race = new Race();
        Team team = new Team();
        TeamMember teammember = new TeamMember();
        TeamPosition teamposition = new TeamPosition();
        team.getName();
        club.getAbreviation();
        club.getId();
        // Backend.getLatestMemberPositions(i?, 10);
        event.getHandicap();
        event.getWaypoints();
        // pos part
        //teampostion.getTime();
        //teampostion.getLatitude();
        //teampostion.getLongitude();
        // TODO - return the result to the client
        return null;
    }

    /**
     * Returns a specific amount of the latest Positions of a team
     *
     * @param teamid
     *            ID of the wanted team
     * @return the latest positions of the given team
     */
    @POST
    @Path("/FrontendUpdate")
    @Produces(MediaType.APPLICATION_XML)
    public List<TeamPosition> getLatestTeamPositions(@FormParam("id") int teamid) {
        EntityManager em = emf.createEntityManager();
        Query query = em.createNamedQuery("TeamPosition.findLatestPositions");
        query.setParameter("id", teamid);
        //query.setParameter("limit", limit);
        @SuppressWarnings("unchecked")
        List<TeamPosition> positions = query.getResultList();
        return positions;
    }
}
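// --- Illustrative client sketch (not part of the original source) ---
// A minimal, hypothetical example of how a client might call the POST
// /APId/FrontendUpdate endpoint defined above. The host, port and team id are
// made up; the endpoint expects a form parameter named "id" and answers with
// the latest positions of that team.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

class FrontendUpdateClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/APId/FrontendUpdate")) // hypothetical host/port
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString("id=42"))           // team id 42 is made up
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}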
package de.st_ddt.crazylogin; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Location; import org.bukkit.OfflinePlayer; import org.bukkit.World; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Player; import org.bukkit.event.HandlerList; import org.bukkit.plugin.PluginManager; import org.bukkit.plugin.messaging.Messenger; import de.st_ddt.crazycore.CrazyCore; import de.st_ddt.crazylogin.commands.CommandAdminLogin; import de.st_ddt.crazylogin.commands.CommandAutoLogout; import de.st_ddt.crazylogin.commands.CommandExecutor; import de.st_ddt.crazylogin.commands.CommandLogin; import de.st_ddt.crazylogin.commands.CommandLoginWithAutoLogout; import de.st_ddt.crazylogin.commands.CommandLogout; import de.st_ddt.crazylogin.commands.CommandMainCommands; import de.st_ddt.crazylogin.commands.CommandMainDropOldData; import de.st_ddt.crazylogin.commands.CommandMainGenerateToken; import de.st_ddt.crazylogin.commands.CommandPassword; import de.st_ddt.crazylogin.commands.CommandPlayerCheckPassword; import de.st_ddt.crazylogin.commands.CommandPlayerCreate; import de.st_ddt.crazylogin.commands.CommandPlayerDetachIP; import de.st_ddt.crazylogin.commands.CommandPlayerExpirePassword; import de.st_ddt.crazylogin.commands.CommandPlayerPassword; import de.st_ddt.crazylogin.commands.CommandPlayerReverify; import de.st_ddt.crazylogin.commands.CommandSaveLoginLocation; import de.st_ddt.crazylogin.commands.CommandTokenLogin; import de.st_ddt.crazylogin.commands.CrazyCommandLoginCheck; import de.st_ddt.crazylogin.crypt.ChangedAlgorithmEncryptor; import de.st_ddt.crazylogin.crypt.CrazyCrypt1; import de.st_ddt.crazylogin.crypt.CrazyCrypt2; import de.st_ddt.crazylogin.crypt.EncryptHelper; import de.st_ddt.crazylogin.crypt.Encryptor; import de.st_ddt.crazylogin.crypt.MD2Crypt; import de.st_ddt.crazylogin.crypt.MD5Crypt; import de.st_ddt.crazylogin.crypt.PlainCrypt; import de.st_ddt.crazylogin.crypt.SHA_1Crypt; import de.st_ddt.crazylogin.crypt.SHA_256Crypt; import de.st_ddt.crazylogin.crypt.SHA_512Crypt; import de.st_ddt.crazylogin.crypt.SeededMD2Crypt; import de.st_ddt.crazylogin.crypt.SeededMD5Crypt; import de.st_ddt.crazylogin.crypt.SeededSHA_1Crypt; import de.st_ddt.crazylogin.crypt.SeededSHA_256Crypt; import de.st_ddt.crazylogin.crypt.SeededSHA_512Crypt; import de.st_ddt.crazylogin.crypt.UpdatingEncryptor; import de.st_ddt.crazylogin.crypt.WebCrypt; import de.st_ddt.crazylogin.crypt.WhirlPoolCrypt; import de.st_ddt.crazylogin.data.LoginData; import de.st_ddt.crazylogin.data.LoginPlayerData; import de.st_ddt.crazylogin.data.LoginUnregisteredPlayerData; import de.st_ddt.crazylogin.data.Token; import de.st_ddt.crazylogin.data.comparator.LoginDataComparator; import de.st_ddt.crazylogin.data.comparator.LoginDataIPComparator; import de.st_ddt.crazylogin.data.comparator.LoginDataLastActionComparator; import de.st_ddt.crazylogin.databases.CrazyLoginConfigurationDatabase; import de.st_ddt.crazylogin.databases.CrazyLoginDataDatabase; import de.st_ddt.crazylogin.databases.CrazyLoginFlatDatabase; import de.st_ddt.crazylogin.databases.CrazyLoginMySQLDatabase; import de.st_ddt.crazylogin.databases.CrazyLoginSQLiteDatabase; import de.st_ddt.crazylogin.events.CrazyLoginLoginEvent; import 
de.st_ddt.crazylogin.events.CrazyLoginLoginFailEvent; import de.st_ddt.crazylogin.events.CrazyLoginPasswordEvent; import de.st_ddt.crazylogin.events.CrazyLoginPreLoginEvent; import de.st_ddt.crazylogin.events.CrazyLoginPreRegisterEvent; import de.st_ddt.crazylogin.events.LoginFailReason; import de.st_ddt.crazylogin.exceptions.CrazyLoginExceedingMaxRegistrationsPerIPException; import de.st_ddt.crazylogin.exceptions.CrazyLoginRegistrationsDisabled; import de.st_ddt.crazylogin.exceptions.PasswordRejectedException; import de.st_ddt.crazylogin.exceptions.PasswordRejectedLengthException; import de.st_ddt.crazylogin.listener.CrazyListener; import de.st_ddt.crazylogin.listener.DynamicPlayerListener; import de.st_ddt.crazylogin.listener.DynamicPlayerListener_1_2_5; import de.st_ddt.crazylogin.listener.DynamicPlayerListener_1_3_2; import de.st_ddt.crazylogin.listener.DynamicPlayerListener_1_4_2; import de.st_ddt.crazylogin.listener.DynamicPlayerListener_1_5; import de.st_ddt.crazylogin.listener.DynamicVehicleListener; import de.st_ddt.crazylogin.listener.MessageListener; import de.st_ddt.crazylogin.listener.PlayerListener; import de.st_ddt.crazylogin.listener.WorldListener; import de.st_ddt.crazylogin.metadata.Authenticated; import de.st_ddt.crazylogin.tasks.DropInactiveAccountsTask; import de.st_ddt.crazylogin.tasks.ScheduledCheckTask; import de.st_ddt.crazyplugin.CrazyPlayerDataPlugin; import de.st_ddt.crazyplugin.data.PlayerDataFilter; import de.st_ddt.crazyplugin.data.PlayerDataNameFilter; import de.st_ddt.crazyplugin.events.CrazyPlayerRemoveEvent; import de.st_ddt.crazyplugin.exceptions.CrazyCommandCircumstanceException; import de.st_ddt.crazyplugin.exceptions.CrazyCommandErrorException; import de.st_ddt.crazyplugin.exceptions.CrazyCommandException; import de.st_ddt.crazyplugin.exceptions.CrazyCommandNoSuchException; import de.st_ddt.crazyplugin.exceptions.CrazyCommandParameterException; import de.st_ddt.crazyplugin.exceptions.CrazyCommandPermissionException; import de.st_ddt.crazyplugin.exceptions.CrazyCommandUsageException; import de.st_ddt.crazyplugin.exceptions.CrazyException; import de.st_ddt.crazyutil.ChatHelper; import de.st_ddt.crazyutil.ChatHelperExtended; import de.st_ddt.crazyutil.ListOptionsModder; import de.st_ddt.crazyutil.ObjectSaveLoadHelper; import de.st_ddt.crazyutil.PreSetList; import de.st_ddt.crazyutil.VersionHelper; import de.st_ddt.crazyutil.databases.DatabaseType; import de.st_ddt.crazyutil.databases.PlayerDataDatabase; import de.st_ddt.crazyutil.locales.CrazyLocale; import de.st_ddt.crazyutil.metrics.Metrics; import de.st_ddt.crazyutil.metrics.Metrics.Graph; import de.st_ddt.crazyutil.metrics.Metrics.Plotter; import de.st_ddt.crazyutil.modes.BooleanFalseMode; import de.st_ddt.crazyutil.modes.BooleanTrueMode; import de.st_ddt.crazyutil.modes.DoubleMode; import de.st_ddt.crazyutil.modes.DurationMode; import de.st_ddt.crazyutil.modes.IntegerMode; import de.st_ddt.crazyutil.modes.LongMode; import de.st_ddt.crazyutil.modes.Mode; import de.st_ddt.crazyutil.modules.login.CrazyLoginSystem; import de.st_ddt.crazyutil.modules.login.LoginModule; import de.st_ddt.crazyutil.paramitrisable.BooleanParamitrisable; import de.st_ddt.crazyutil.paramitrisable.Paramitrisable; import de.st_ddt.crazyutil.source.Localized; import de.st_ddt.crazyutil.source.LocalizedVariable; import de.st_ddt.crazyutil.source.Permission; import de.st_ddt.crazyutil.source.PermissionVariable; @LocalizedVariable(variables = { "CRAZYPLUGIN", "CRAZYPLAYERDATAPLUGIN" }, values = { "CRAZYLOGIN", "CRAZYLOGIN" }) 
@PermissionVariable(variables = { "CRAZYPLUGIN", "CRAZYPLAYERDATAPLUGIN" }, values = { "CRAZYLOGIN", "CRAZYLOGIN" }) public final class CrazyLogin extends CrazyPlayerDataPlugin<LoginData, LoginPlayerData> implements LoginPlugin<LoginPlayerData> { private static CrazyLogin plugin; private final Map<String, Date> antiRequestSpamTable = new HashMap<String, Date>(); private final Map<String, Integer> loginFailuresPerIP = new HashMap<String, Integer>(); private final Map<String, Integer> illegalCommandUsesPerIP = new HashMap<String, Integer>(); private final Map<String, Date> tempBans = new HashMap<String, Date>(); private final Map<String, Token> loginTokens = new HashMap<String, Token>(); private final Set<Player> playerAutoLogouts = new HashSet<Player>(); private final Map<String, Location> saveLoginLocations = new HashMap<String, Location>(); private PlayerListener playerListener; private DynamicPlayerListener dynamicPlayerListener; private DynamicVehicleListener dynamicVehicleListener; private MessageListener messageListener; private boolean dynamicHooksRegistered; // plugin config private boolean alwaysNeedPassword; private boolean confirmNewPassword; private boolean confirmWithOldPassword; private boolean dynamicProtection; private boolean hideWarnings; private int autoLogout; private int autoKick; private long autoTempBan; private int autoKickUnregistered; private int autoKickLoginFailer; private long autoTempBanLoginFailer; private int autoKickCommandUsers; private long autoTempBanCommandUsers; private boolean blockGuestCommands; private boolean blockGuestChat; private boolean blockGuestJoin; private boolean removeGuestData; private List<String> commandWhiteList; private String uniqueIDKey; private boolean disableRegistrations; private boolean disableAdminLogin; private boolean disableTokenLogin; private boolean doNotSpamAuthRequests; private boolean doNotSpamRegisterRequests; private long delayAuthRequests; private long repeatAuthRequests; private boolean forceSingleSession; private boolean forceSingleSessionSameIPBypass; private long delayPreRegisterSecurity; private long delayPreLoginSecurity; private boolean saveLoginEnabled; private boolean forceSaveLogin; private boolean hideInventory; private boolean hidePlayer; private boolean hideChat; private boolean delayJoinQuitMessages; private boolean useCustomJoinQuitMessages; private boolean hidePasswordsFromConsole; private Encryptor encryptor; private int minPasswordLength; private int protectedAccountMinPasswordLength; private int autoDelete; private int maxStoredIPs; private int maxOnlinesPerIP; private int maxRegistrationsPerIP; private boolean pluginCommunicationEnabled; private double moveRange; private String filterNames; private boolean blockDifferentNameCases; private int minNameLength; private int maxNameLength; static { // Encryption Algorithms EncryptHelper.registerAlgorithm("Plaintext", PlainCrypt.class); EncryptHelper.registerAlgorithm("MD2", MD2Crypt.class); EncryptHelper.registerAlgorithm("MD5", MD5Crypt.class); EncryptHelper.registerAlgorithm("SHA-1", SHA_1Crypt.class); EncryptHelper.registerAlgorithm("SHA-256", SHA_256Crypt.class); EncryptHelper.registerAlgorithm("SHA-512", SHA_512Crypt.class); EncryptHelper.registerAlgorithm("SeededMD2", SeededMD2Crypt.class); EncryptHelper.registerAlgorithm("SeededMD5", SeededMD5Crypt.class); EncryptHelper.registerAlgorithm("SeededSHA-1", SeededSHA_1Crypt.class); EncryptHelper.registerAlgorithm("SeededSHA-256", SeededSHA_256Crypt.class); 
EncryptHelper.registerAlgorithm("SeededSHA-512", SeededSHA_512Crypt.class); EncryptHelper.registerAlgorithm("CrazyCrypt1", CrazyCrypt1.class); EncryptHelper.registerAlgorithm("CrazyCrypt2", CrazyCrypt2.class); EncryptHelper.registerAlgorithm("WebCrypt", WebCrypt.class); EncryptHelper.registerAlgorithm("Whirlpool", WhirlPoolCrypt.class); // LoginSystem LoginModule.registerLoginSystem(CrazyLoginSystem.class); LoginModule.clear(); } public static CrazyLogin getPlugin() { return plugin; } public CrazyLogin() { super(); registerPreSetLists(); registerModes(); registerFilter(); registerSorters(); } private void registerPreSetLists() { new PreSetList("login_verified") { @Override public List<String> getList() { final List<String> names = new ArrayList<String>(); for (final LoginPlayerData data : getOnlinePlayerDatas()) if (data.isLoggedIn()) names.add(data.getName()); return names; } }.register(); new PreSetList("login_notverified") { @Override public List<String> getList() { final List<String> names = new ArrayList<String>(); for (final LoginPlayerData data : getOnlinePlayerDatas()) if (!data.isLoggedIn()) names.add(data.getName()); return names; } }.register(); new PreSetList("login_guest") { @Override public List<String> getList() { final List<String> names = new ArrayList<String>(); for (final Player player : Bukkit.getOnlinePlayers()) if (!hasPlayerData(player)) names.add(player.getName()); return names; } }.register(); } @Localized("CRAZYLOGIN.MODE.CHANGE $Name$ $Value$") private void registerModes() { modeCommand.addMode(new BooleanFalseMode(this, "alwaysNeedPassword") { @Override public Boolean getValue() { return alwaysNeedPassword; } @Override public void setValue(final Boolean newValue) throws CrazyException { alwaysNeedPassword = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "confirmNewPassword") { @Override public Boolean getValue() { return confirmNewPassword; } @Override public void setValue(final Boolean newValue) throws CrazyException { confirmNewPassword = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "confirmWithOldPassword") { @Override public Boolean getValue() { return confirmWithOldPassword; } @Override public void setValue(final Boolean newValue) throws CrazyException { confirmWithOldPassword = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "dynamicProtection") { @Override public Boolean getValue() { return dynamicProtection; } @Override public void setValue(final Boolean newValue) throws CrazyException { dynamicProtection = newValue; if (dynamicProtection) unregisterDynamicHooks(); else registerDynamicHooks(); saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "hideWarnings") { @Override public Boolean getValue() { return hideWarnings; } @Override public void setValue(final Boolean newValue) throws CrazyException { hideWarnings = newValue; saveConfiguration(); } }); modeCommand.addMode(new DurationMode(this, "delayPreRegisterSecurity") { @Override public Long getValue() { return delayPreRegisterSecurity * 50; } @Override public void setValue(final Long newValue) throws CrazyException { delayPreRegisterSecurity = Math.max(newValue / 50, -1); saveConfiguration(); } }); modeCommand.addMode(new DurationMode(this, "delayPreLoginSecurity") { @Override public Long getValue() { return delayPreLoginSecurity * 50; } @Override public void setValue(final Long newValue) throws CrazyException { delayPreLoginSecurity = Math.max(newValue / 50, -1); 
saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "saveLoginEnabled") { @Override public Boolean getValue() { return saveLoginEnabled; } @Override public void setValue(final Boolean newValue) throws CrazyException { saveLoginEnabled = newValue; if (newValue) saveLoginEnabled = true; else { saveLoginEnabled = false; forceSaveLogin = false; } saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "forceSaveLogin") { @Override public Boolean getValue() { return forceSaveLogin; } @Override public void setValue(final Boolean newValue) throws CrazyException { if (newValue) { saveLoginEnabled = true; forceSaveLogin = true; } else forceSaveLogin = false; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "hideInventory") { @Override public Boolean getValue() { return hideInventory; } @Override public void setValue(final Boolean newValue) throws CrazyException { hideInventory = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "hidePlayer") { @Override public Boolean getValue() { return hidePlayer; } @Override public void setValue(final Boolean newValue) throws CrazyException { hidePlayer = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "hideChat") { @Override public Boolean getValue() { return hideChat; } @Override public void setValue(final Boolean newValue) throws CrazyException { hideChat = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "delayJoinQuitMessages") { @Override public Boolean getValue() { return delayJoinQuitMessages; } @Override public void setValue(final Boolean newValue) throws CrazyException { delayJoinQuitMessages = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "useCustomJoinQuitMessages") { @Override public Boolean getValue() { return useCustomJoinQuitMessages; } @Override public void setValue(final Boolean newValue) throws CrazyException { useCustomJoinQuitMessages = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "hidePasswordsFromConsole") { @Override public Boolean getValue() { return hidePasswordsFromConsole; } @Override public void setValue(final Boolean newValue) throws CrazyException { hidePasswordsFromConsole = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "disableRegistrations") { @Override public Boolean getValue() { return disableRegistrations; } @Override public void setValue(final Boolean newValue) throws CrazyException { disableRegistrations = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanTrueMode(this, "disableAdminLogin") { @Override public Boolean getValue() { return disableAdminLogin; } @Override public void setValue(final Boolean newValue) throws CrazyException { disableAdminLogin = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanTrueMode(this, "disableTokenLogin") { @Override public Boolean getValue() { return disableTokenLogin; } @Override public void setValue(final Boolean newValue) throws CrazyException { disableTokenLogin = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "doNotSpamAuthRequests") { @Override public Boolean getValue() { return doNotSpamAuthRequests; } @Override public void setValue(final Boolean newValue) throws CrazyException { doNotSpamAuthRequests = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "doNotSpamRegisterRequests") { @Override 
public Boolean getValue() { return doNotSpamRegisterRequests; } @Override public void setValue(final Boolean newValue) throws CrazyException { doNotSpamRegisterRequests = newValue; saveConfiguration(); } }); modeCommand.addMode(new DurationMode(this, "delayAuthRequests") { @Override public Long getValue() { return delayAuthRequests * 50; } @Override public void setValue(final Long newValue) throws CrazyException { delayAuthRequests = Math.max(newValue / 50, 0); saveConfiguration(); } }); modeCommand.addMode(new DurationMode(this, "repeatAuthRequests") { @Override public Long getValue() { return repeatAuthRequests * 50; } @Override public void setValue(final Long newValue) throws CrazyException { repeatAuthRequests = Math.max(newValue / 50, 0); saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "forceSingleSession") { @Override public Boolean getValue() { return forceSingleSession; } @Override public void setValue(final Boolean newValue) throws CrazyException { forceSingleSession = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "forceSingleSessionSameIPBypass") { @Override public Boolean getValue() { return forceSingleSessionSameIPBypass; } @Override public void setValue(final Boolean newValue) throws CrazyException { forceSingleSessionSameIPBypass = newValue; saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "autoLogout") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " seconds"); } @Override public Integer getValue() { return autoLogout; } @Override public void setValue(final Integer newValue) throws CrazyException { autoLogout = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "autoKick") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " seconds"); } @Override public Integer getValue() { return autoKick; } @Override public void setValue(final Integer newValue) throws CrazyException { autoKick = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new LongMode(this, "autoTempBan") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " seconds"); } @Override public Long getValue() { return autoTempBan; } @Override public void setValue(final Long newValue) throws CrazyException { autoTempBan = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "autoKickUnregistered") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " seconds"); } @Override public Integer getValue() { return autoKickUnregistered; } @Override public void setValue(final Integer newValue) throws CrazyException { autoKickUnregistered = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "autoKickLoginFailer") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? 
"disabled" : getValue() + " failed attempts"); } @Override public Integer getValue() { return autoKickLoginFailer; } @Override public void setValue(final Integer newValue) throws CrazyException { autoKickLoginFailer = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new LongMode(this, "autoTempBanLoginFailer") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " seconds"); } @Override public Long getValue() { return autoTempBanLoginFailer; } @Override public void setValue(final Long newValue) throws CrazyException { autoTempBanLoginFailer = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "autoKickCommandUsers") { @Override public Integer getValue() { return autoKickCommandUsers; } @Override public void setValue(final Integer newValue) throws CrazyException { autoKickCommandUsers = newValue; saveConfiguration(); } }); modeCommand.addMode(new LongMode(this, "autoTempBanCommandUsers") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " seconds"); } @Override public Long getValue() { return autoTempBanCommandUsers; } @Override public void setValue(final Long newValue) throws CrazyException { autoTempBanCommandUsers = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "blockGuestCommands") { @Override public Boolean getValue() { return blockGuestCommands; } @Override public void setValue(final Boolean newValue) throws CrazyException { blockGuestCommands = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "blockGuestChat") { @Override public Boolean getValue() { return blockGuestChat; } @Override public void setValue(final Boolean newValue) throws CrazyException { blockGuestChat = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "blockGuestJoin") { @Override public Boolean getValue() { return blockGuestJoin; } @Override public void setValue(final Boolean newValue) throws CrazyException { blockGuestJoin = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "removeGuestData") { @Override public Boolean getValue() { return removeGuestData; } @Override public void setValue(final Boolean newValue) throws CrazyException { removeGuestData = newValue; saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "maxStoredIPs") { @Override public Integer getValue() { return maxStoredIPs; } @Override public void setValue(final Integer newValue) throws CrazyException { maxStoredIPs = Math.max(newValue, 1); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "maxRegistrationsPerIP") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue()); } @Override public Integer getValue() { return maxRegistrationsPerIP; } @Override public void setValue(final Integer newValue) throws CrazyException { maxRegistrationsPerIP = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "maxOnlinesPerIP") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? 
"disabled" : getValue()); } @Override public Integer getValue() { return maxOnlinesPerIP; } @Override public void setValue(final Integer newValue) throws CrazyException { maxOnlinesPerIP = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new Mode<DatabaseType>(this, "saveType", DatabaseType.class) { @Override @Localized("CRAZYLOGIN.PLUGININFO.DATABASEENTRIES $EntryCount$") public void showValue(final CommandSender sender) { super.showValue(sender); if (database != null) sendLocaleMessage("PLUGININFO.DATABASEENTRIES", sender, database.getAllEntries().size()); } @Override public DatabaseType getValue() { return database.getType(); } @Override public void setValue(final CommandSender sender, final String... args) throws CrazyException { if (args.length > 1) throw new CrazyCommandUsageException("[SaveType (CONFIG/FLAT/MYSQL/SQLITE)]"); final String saveType = args[0]; DatabaseType type = null; try { type = DatabaseType.valueOf(saveType.toUpperCase()); } catch (final Exception e) { type = null; } if (type == null) throw new CrazyCommandNoSuchException("SaveType", saveType, "CONFIG", "FLAT", "MYSQL", "SQLITE"); setValue(type); showValue(sender); } @Override public void setValue(final DatabaseType newValue) throws CrazyException { if (database != null) if (newValue == database.getType()) return; final PlayerDataDatabase<LoginPlayerData> oldDatabase = database; getConfig().set("database.saveType", newValue.toString()); loadDatabase(); if (database == null) database = oldDatabase; else if (oldDatabase != null) synchronized (oldDatabase.getDatabaseLock()) { database.saveAll(oldDatabase.getAllEntries()); } save(); } @Override public List<String> tab(final String... args) { final List<String> res = new ArrayList<String>(); res.add("CONFIG"); res.add("FLAT"); res.add("MYSQL"); res.add("SQLITE"); return res; } }); modeCommand.addMode(new IntegerMode(this, "autoDelete") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " days"); } @Override public Integer getValue() { return autoDelete; } @Override public void setValue(final Integer newValue) throws CrazyException { autoDelete = Math.max(newValue, -1); saveConfiguration(); if (autoDelete != -1) getServer().getScheduler().runTaskTimerAsynchronously(plugin, new DropInactiveAccountsTask(CrazyLogin.this), 20 * 60 * 60, 20 * 60 * 60 * 6); } }); modeCommand.addMode(new DoubleMode(this, "moveRange") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() == -1 ? "disabled" : getValue() + " blocks"); } @Override public Double getValue() { return moveRange; } @Override public void setValue(final Double newValue) throws CrazyException { moveRange = Math.max(newValue, -1); saveConfiguration(); } }); modeCommand.addMode(new Mode<String>(this, "filterNames", String.class) { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, "filterNames", getValue().equals(".") ? "disabled" : getValue()); } @Override public String getValue() { return filterNames; } @Override public void setValue(final CommandSender sender, final String... 
args) throws CrazyException { String newFilter = ChatHelper.listingString(" ", args); if (newFilter.equalsIgnoreCase("false") || newFilter.equalsIgnoreCase("0") || newFilter.equalsIgnoreCase("off")) newFilter = "."; else if (newFilter.equalsIgnoreCase("true") || newFilter.equalsIgnoreCase("1") || newFilter.equalsIgnoreCase("on")) newFilter = "[a-zA-Z0-9_]"; setValue(newFilter); showValue(sender); } @Override public void setValue(final String newValue) throws CrazyException { filterNames = newValue; saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "blockDifferentNameCases") { @Override public Boolean getValue() { return blockDifferentNameCases; } @Override public void setValue(final Boolean newValue) throws CrazyException { blockDifferentNameCases = newValue; saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "minNameLength") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() + " characters"); } @Override public Integer getValue() { return minNameLength; } @Override public void setValue(final Integer newValue) throws CrazyException { minNameLength = Math.min(Math.max(newValue, 1), 16); saveConfiguration(); } }); modeCommand.addMode(new IntegerMode(this, "maxNameLength") { @Override public void showValue(final CommandSender sender) { sendLocaleMessage("MODE.CHANGE", sender, name, getValue() + " characters"); } @Override public Integer getValue() { return maxNameLength; } @Override public void setValue(final Integer newValue) throws CrazyException { maxNameLength = Math.min(Math.max(newValue, 1), 255); saveConfiguration(); } }); modeCommand.addMode(new BooleanFalseMode(this, "saveDatabaseOnShutdown") { @Override public Boolean getValue() { return saveDatabaseOnShutdown; } @Override public void setValue(final Boolean newValue) throws CrazyException { saveDatabaseOnShutdown = newValue; saveConfiguration(); } }); modeCommand.addMode(new Mode<Encryptor>(this, "algorithm", Encryptor.class) { @Override public Encryptor getValue() { return encryptor; } @Override public void setValue(final CommandSender sender, final String... args) throws CrazyException { final Encryptor encryptor = EncryptHelper.getEncryptor(CrazyLogin.this, args[0], ChatHelperExtended.shiftArray(args, 1)); if (encryptor == null) throw new CrazyCommandNoSuchException("Encryptor", args[0], EncryptHelper.getAlgorithms()); setValue(encryptor); showValue(sender); } @Override public void setValue(final Encryptor newValue) throws CrazyException { if (encryptor.equals(newValue)) encryptor = newValue; else encryptor = new ChangedAlgorithmEncryptor(CrazyLogin.this, newValue, encryptor); saveConfiguration(); } @Override public List<String> tab(final String... 
args) { if (args.length != 1) return null; final List<String> res = new LinkedList<String>(); final String arg = args[0].toLowerCase(); for (final String algo : EncryptHelper.getAlgorithms()) if (algo.toLowerCase().startsWith(arg)) res.add(algo); return res; } }); modeCommand.addMode(new IntegerMode(this, "minPasswordLength") { @Override public void setValue(final Integer newValue) throws CrazyException { minPasswordLength = newValue; } @Override public Integer getValue() { return minPasswordLength; } }); modeCommand.addMode(new IntegerMode(this, "protectedAccountMinPasswordLength") { @Override public void setValue(final Integer newValue) throws CrazyException { protectedAccountMinPasswordLength = newValue; } @Override public Integer getValue() { return protectedAccountMinPasswordLength; } }); } private void registerFilter() { playerDataFilters.add(new PlayerDataNameFilter<LoginData>()); playerDataFilters.add(new PlayerDataFilter<LoginData>("ip", new String[] { "ip" }) { @Override public FilterInstance getInstance() { return new FilterInstance() { private String ip = null; @Override public void setParameter(final String parameter) throws CrazyException { ip = parameter; } @Override public boolean isActive() { return ip != null; } @Override public boolean filter(final LoginData data) { return data.hasIP(ip); } }; } }); playerDataFilters.add(new PlayerDataFilter<LoginData>("online", new String[] { "on", "online" }) { @Override public FilterInstance getInstance() { return new FilterInstance() { private Boolean online = null; @Override public void setParameter(String parameter) throws CrazyException { parameter = parameter.toLowerCase(); if (parameter.equals("true")) online = true; else if (parameter.equals("1")) online = true; else if (parameter.equals("y")) online = true; else if (parameter.equals("yes")) online = true; else if (parameter.equals("false")) online = false; else if (parameter.equals("0")) online = false; else if (parameter.equals("n")) online = false; else if (parameter.equals("no")) online = false; else if (parameter.equals("*")) online = null; else /** * Checks whether the player is allowed to execute the given command. * * @param player * The player who should be checked. * @param command * The command which should be checked. * @return True, if the given player is allowed to executed the given command. False otherwise. 
*/ @Permission("crazylogin.warncommandexploits") @Localized({ "CRAZYLOGIN.KICKED.COMMANDUSAGE", "CRAZYLOGIN.COMMAND.EXPLOITWARN $Name$ $IP$ $Command$ $Fails$" }) public boolean playerCommand(final Player player, final String command) { if (hasPlayerData(player)) { if (isLoggedIn(player)) return true; } else if (!blockGuestCommands) return true; final String lowerCommand = command.toLowerCase(); if (lowerCommand.startsWith("/")) { for (final String whiteCommand : commandWhiteList) if (lowerCommand.matches(whiteCommand)) return true; final String IP = player.getAddress().getAddress().getHostAddress(); Integer fails = illegalCommandUsesPerIP.get(IP); if (fails == null) fails = 1; else fails++; if (autoKickCommandUsers > 0 && fails % autoKickCommandUsers == 0) { logger.log("CommandBlocked", player.getName() + " @ " + IP + " has been kicked for trying to illegaly execute a command", command, "(AttemptPerIP: " + fails + ")"); player.kickPlayer(locale.getFormatedLocaleMessage(player, "KICKED.COMMANDUSAGE")); if (autoTempBanCommandUsers > 0) setTempBanned(player, autoTempBanCommandUsers); } else { logger.log("CommandBlocked", player.getName() + " @ " + IP + " tried to illegaly execute a command", command, "(AttemptPerIP: " + fails + ")"); sendAuthReminderMessage(player); } illegalCommandUsesPerIP.put(IP, fails); if (!hideWarnings) broadcastLocaleMessage(true, "crazylogin.warncommandexploits", true, "COMMAND.EXPLOITWARN", player.getName(), IP, command.replaceAll("\\$", "_"), fails); return false; } else return true; } @Override @Permission("crazylogin.requirepassword") public boolean isLoggedIn(final Player player) { if (player.hasMetadata("NPC")) return true; final LoginPlayerData data = getPlayerData(player); if (data == null) return !alwaysNeedPassword && !player.hasPermission("crazylogin.requirepassword"); if (player.isOnline()) return data.isLoggedIn(); else return data.checkTimeOut(); } /** * Checks whether the player is logged in and the password is not expired. * * @param player * The player to be checked. * @return True, if the player is logged in successfully and his password is not expired. Otherwise False. 
*/ @Permission("crazylogin.requirepassword") public boolean isLoggedInPlus(final Player player) { if (player.hasMetadata("NPC")) return true; final LoginPlayerData data = getPlayerData(player); if (data == null) return !alwaysNeedPassword && !player.hasPermission("crazylogin.requirepassword"); if (player.isOnline()) return data.isLoggedIn() && !data.isPasswordExpired(); else return data.checkTimeOut(); } @Override public void forceRelogin(final OfflinePlayer player) { forceRelogin(getPlayerData(player)); } @Override public void forceRelogin(final String name) { forceRelogin(getPlayerData(name)); } public void forceRelogin(final LoginPlayerData data) { if (data == null) return; data.setLoggedIn(false); final Player player = data.getPlayer(); if (player != null) playerListener.PlayerJoin(data.getPlayer()); } public void expirePassword(final String name) { expirePassword(getPlayerData(name)); } public void expirePassword(final LoginPlayerData data) { if (data == null) return; data.expirePassword(); ((CrazyLoginDataDatabase) database).saveWithoutPassword(data); final Player player = data.getPlayer(); if (player != null) sendAuthReminderMessage(player); } @Override @Localized({ "CRAZYLOGIN.REGISTER.REQUEST", "CRAZYLOGIN.LOGIN.PASSWORDEXPIRED", "CRAZYLOGIN.LOGIN.REQUEST" }) public void sendAuthReminderMessage(final Player player) { if (doNotSpamAuthRequests) return; final Date now = new Date(); final Date date = antiRequestSpamTable.get(player.getName()); if (date == null) { now.setTime(now.getTime() + 5000L); antiRequestSpamTable.put(player.getName(), now); } else { if (date.after(now)) return; date.setTime(now.getTime() + 5000L); } final LoginPlayerData data = plugin.getPlayerData(player); if (data == null) sendLocaleMessage("REGISTER.REQUEST", player); else if (data.isLoggedIn()) sendLocaleMessage("LOGIN.PASSWORDEXPIRED", player); else sendLocaleMessage("LOGIN.REQUEST", player); } @Override public boolean isAlwaysNeedPassword() { return alwaysNeedPassword; } public boolean isConfirmNewPasswordEnabled() { return confirmNewPassword; } public boolean isConfirmWithOldPasswordEnabled() { return confirmWithOldPassword; } public boolean isHidingWarningsEnabled() { return hideWarnings; } @Override public boolean isAutoLogoutEnabled() { return autoLogout != -1; } @Override public boolean isInstantAutoLogoutEnabled() { return autoLogout == 0; } @Override public int getAutoLogoutTime() { return autoLogout; } public void checkTimeOuts() { if (database != null) if (autoLogout > 0) { final Date timeOut = new Date(System.currentTimeMillis() - autoLogout * 1000); if (database.isCachedDatabase()) synchronized (database.getDatabaseLock()) { for (final LoginPlayerData data : database.getAllEntries()) if (!data.isOnline()) data.checkTimeOut(timeOut); } else { final HashSet<LoginPlayerData> dropping = new HashSet<LoginPlayerData>(); synchronized (database.getDatabaseLock()) { for (final LoginPlayerData data : database.getAllEntries()) if (!data.isOnline()) if (!data.checkTimeOut(timeOut)) dropping.add(data); } for (final LoginPlayerData data : dropping) database.unloadEntry(data.getName()); dropping.clear(); } } } @Override public int getAutoKick() { return autoKick; } public long getAutoTempBan() { return autoTempBan; } @Override public int getAutoKickUnregistered() { return autoKickUnregistered; } public int getAutoKickLoginFailer() { return autoKickLoginFailer; } public long getAutoTempBanLoginFailer() { return autoTempBanLoginFailer; } @Override public boolean isBlockingGuestChatEnabled() { return 
blockGuestChat; } @Override public boolean isBlockingGuestJoinEnabled() { return blockGuestJoin; } @Override public boolean isRemovingGuestDataEnabled() { return removeGuestData; } public boolean isTempBanned(final String IP) { final Date date = tempBans.get(IP); if (date == null) return false; else return System.currentTimeMillis() < date.getTime(); } public Date getTempBanned(final String IP) { return tempBans.get(IP); } public String getTempBannedString(final String IP) { final Date date = getTempBanned(IP); if (date == null) return DATETIMEFORMAT.format(new Date(0)); else return DATETIMEFORMAT.format(date); } public void setTempBanned(final Player player, final long duration) { setTempBanned(player.getAddress().getAddress().getHostAddress(), duration); } public void setTempBanned(final String IP, final long duration) { tempBans.put(IP, new Date(System.currentTimeMillis() + duration * 1000)); } @Override public List<String> getCommandWhiteList() { return commandWhiteList; } public boolean isAvoidingSpammedAuthRequests() { return doNotSpamAuthRequests; } public boolean isAvoidingSpammedRegisterRequests() { return doNotSpamRegisterRequests; } public long getDelayAuthRequests() { return delayAuthRequests; } public long getRepeatAuthRequests() { return repeatAuthRequests; } public boolean isForceSingleSessionEnabled() { return forceSingleSession; } public boolean isForceSingleSessionSameIPBypassEnabled() { return forceSingleSessionSameIPBypass; } public boolean isDelayingPreRegisterSecurityEnabled() { return delayPreRegisterSecurity > 0; } public long getDelayPreRegisterSecurity() { return delayPreRegisterSecurity; } public boolean isDelayingPreLoginSecurityEnabled() { return delayPreLoginSecurity > 0; } public long getDelayPreLoginSecurity() { return delayPreLoginSecurity; } public boolean isSaveLoginEnabled() { return saveLoginEnabled; } @Override public boolean isForceSaveLoginEnabled() { return saveLoginEnabled && forceSaveLogin; } public Map<String, Location> getSaveLoginLocations() { return saveLoginLocations; } public Location getSaveLoginLocation(final World world) { if (saveLoginLocations.containsKey(world.getName())) return saveLoginLocations.get(world.getName()).clone(); else return world.getSpawnLocation(); } public Location getSaveLoginLocation(final Player player) { return getSaveLoginLocation(player.getWorld()); } @Override public boolean isHidingInventoryEnabled() { return hideInventory; } @Override public boolean isHidingPlayerEnabled() { return hidePlayer; } @Override public boolean isHidingChatEnabled() { return hideChat; } public boolean isDelayingJoinQuitMessagesEnabled() { return delayJoinQuitMessages; } public boolean isUsingCustomJoinQuitMessagesEnabled() { return useCustomJoinQuitMessages; } public boolean isHidingPasswordsFromConsoleEnabled() { return hidePasswordsFromConsole; } @Override public Encryptor getEncryptor() { return encryptor; } public int getAutoDelete() { return autoDelete; } public int getMaxStoredIPs() { return maxStoredIPs; } public int getMaxOnlinesPerIP() { return maxOnlinesPerIP; } public int getMaxRegistrationsPerIP() { return maxRegistrationsPerIP; } public boolean isPluginCommunicationEnabled() { return pluginCommunicationEnabled; } @Override public double getMoveRange() { return moveRange; } public String getNameFilter() { return filterNames; } public boolean checkNameChars(final String name) { return name.matches(filterNames + "+"); } public boolean isBlockingDifferentNameCasesEnabled() { return blockDifferentNameCases; } public boolean 
checkNameCase(final String name) { if (blockDifferentNameCases) { final LoginPlayerData data = getPlayerData(name); if (data == null) return true; else return data.getName().equals(name); } else return true; } public int getMinNameLength() { return minNameLength; } public int getMaxNameLength() { return maxNameLength; } public boolean checkNameLength(final String name) { final int length = name.length(); if (length < minNameLength) return false; if (length > maxNameLength) return false; return true; } @Override public String getUniqueIDKey() { if (uniqueIDKey == null) uniqueIDKey = new CrazyCrypt1(this, (String[]) null).encrypt(getServer().getName(), null, "randomKeyGen" + (Math.random() * Double.MAX_VALUE) + "V:" + getServer().getBukkitVersion() + "'_+' return uniqueIDKey; } @Override public HashSet<LoginPlayerData> getPlayerDatasPerIP(final String IP) { final HashSet<LoginPlayerData> res = new HashSet<LoginPlayerData>(); if (database == null) return res; synchronized (database.getDatabaseLock()) { for (final LoginPlayerData data : database.getAllEntries()) if (data.hasIP(IP)) res.add(data); } return res; } @Override public HashSet<LoginPlayerData> getPlayerDatasPerPartialIP(final String partialIP) { final HashSet<LoginPlayerData> res = new HashSet<LoginPlayerData>(); if (database == null) return res; synchronized (database.getDatabaseLock()) { for (final LoginPlayerData data : database.getAllEntries()) for (final String ip : data.getIPs()) if (ip.startsWith(partialIP)) res.add(data); } return res; } @Override public final void broadcastLocaleMessage(final boolean console, final String permission, final boolean loggedInOnly, final String localepath, final Object... args) { broadcastLocaleMessage(console, permission, loggedInOnly, getLocale().getLanguageEntry(localepath), args); } @Override public final void broadcastLocaleRootMessage(final boolean console, final String permission, final boolean loggedInOnly, final String localepath, final Object... args) { broadcastLocaleMessage(console, permission, loggedInOnly, CrazyLocale.getLocaleHead().getLanguageEntry(localepath), args); } @Override public final void broadcastLocaleMessage(final boolean console, final String permission, final boolean loggedInOnly, final CrazyLocale locale, final Object... args) { if (permission == null) broadcastLocaleMessage(console, new String[] {}, loggedInOnly, locale, args); else broadcastLocaleMessage(console, new String[] { permission }, loggedInOnly, locale, args); } @Override public final void broadcastLocaleMessage(final boolean console, final String[] permissions, final boolean loggedInOnly, final String localepath, final Object... args) { broadcastLocaleMessage(console, permissions, loggedInOnly, getLocale().getLanguageEntry(localepath), args); } @Override public final void broadcastLocaleRootMessage(final boolean console, final String[] permissions, final boolean loggedInOnly, final String localepath, final Object... args) { broadcastLocaleMessage(console, permissions, loggedInOnly, CrazyLocale.getLocaleHead().getLanguageEntry(localepath), args); } @Override public final void broadcastLocaleMessage(final boolean console, final String[] permissions, final boolean loggedInOnly, final CrazyLocale locale, final Object... 
args) { if (console) sendLocaleMessage(locale, Bukkit.getConsoleSender(), args); Player: for (final Player player : Bukkit.getOnlinePlayers()) { for (final String permission : permissions) if (!player.hasPermission(permission)) continue Player; if (loggedInOnly) if (!isLoggedIn(player)) continue; sendLocaleMessage(locale, player, args); } } public MessageListener getMessageListener() { return messageListener; } public boolean isDynamicProtectionEnabled() { return dynamicProtection; } public boolean isAdminLoginDisabled() { return disableAdminLogin; } public boolean isTokenLoginDisabled() { return disableTokenLogin; } public Map<String, Token> getLoginTokens() { return loginTokens; } public boolean everyoneLoggedIn() { for (final Player player : Bukkit.getOnlinePlayers()) if (!hasPlayerData(player) || !isLoggedIn(player)) return false; return true; } public Set<Player> getPlayerAutoLogouts() { return playerAutoLogouts; } @Override public ListOptionsModder<LoginData> getPlayerDataListModder() { return new ListOptionsModder<LoginData>() { private final BooleanParamitrisable registered = new BooleanParamitrisable(true) { @Override public void setParameter(final String parameter) throws CrazyException { if (parameter.equals("*")) value = null; else super.setParameter(parameter); } }; @Override public void modListPreOptions(final Map<String, Paramitrisable> params, final List<LoginData> datas) { params.put("reg", registered); params.put("register", registered); params.put("registered", registered); } @Override public String[] modListPostOptions(final List<LoginData> datas, final String[] pipeArgs) { if (Boolean.FALSE.equals(registered.getValue())) datas.clear(); if (!Boolean.TRUE.equals(registered.getValue())) for (final OfflinePlayer offline : getServer().getOfflinePlayers()) if (!hasPlayerData(offline)) datas.add(new LoginUnregisteredPlayerData(offline)); return pipeArgs; } }; } }
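// --- Illustrative sketch (not part of the original source) ---
// A self-contained illustration of the temp-ban bookkeeping used by
// setTempBanned/isTempBanned above: each IP maps to an expiry Date, and the
// ban is considered active only while the current time is still before that Date.
// The IP addresses below are examples only.
class TempBanSketch {
    private final java.util.Map<String, java.util.Date> tempBans = new java.util.HashMap<>();

    void setTempBanned(String ip, long durationSeconds) {
        tempBans.put(ip, new java.util.Date(System.currentTimeMillis() + durationSeconds * 1000));
    }

    boolean isTempBanned(String ip) {
        java.util.Date until = tempBans.get(ip);
        return until != null && System.currentTimeMillis() < until.getTime();
    }

    public static void main(String[] args) {
        TempBanSketch bans = new TempBanSketch();
        bans.setTempBanned("203.0.113.7", 60);                 // banned for 60 seconds
        System.out.println(bans.isTempBanned("203.0.113.7"));  // true
        System.out.println(bans.isTempBanned("198.51.100.1")); // false, never banned
    }
}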
package dsoluti.seo; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import io.vertx.core.AbstractVerticle; import io.vertx.core.Vertx; import io.vertx.core.VertxOptions; import io.vertx.core.http.HttpServer; import io.vertx.core.http.HttpServerRequest; import io.vertx.ext.web.Router; /** * Simple HTTP Server that renders HTML pages using Selenium. * */ public class VertxSeleniumServer extends AbstractVerticle { static final String BASE_URL; static { String sysPropName = "base.url"; if ((System.getProperty(sysPropName) == null)) { BASE_URL = "https: System.out.println("Setting default " + sysPropName + " " + BASE_URL); } else { BASE_URL = System.getProperty(sysPropName); System.out.println("Setting defined " + sysPropName + " " + BASE_URL); } } static final String regexToGetUrl = "http.\\/\\/.*?\\/(.*)"; static final String regexToGetDomain = "(http.:\\/\\/.*?\\/)"; static final Pattern patternToGetUrl = Pattern.compile(regexToGetUrl); static final Pattern patternToGetDomain = Pattern.compile(regexToGetDomain); private static final Logger LOGGER = Logger.getLogger(VertxSeleniumServer.class.getName()); @Override public void start() throws Exception { VertxOptions vertxOptions = new VertxOptions(); vertxOptions.setMaxEventLoopExecuteTime(60000000000L); // 60 seconds Vertx vertx = Vertx.vertx(vertxOptions); HttpServer server = vertx.createHttpServer(); Router router = Router.router(vertx); //We don't care in this case, therfore order = false boolean ordered = false; router.route().path("/favicon.ico").blockingHandler(routingContext -> { routingContext.response().setStatusCode(404).end(); }); router.route().handler(new GelfLoggerHandler()); router.route().blockingHandler(routingContext -> { HttpServerRequest request = routingContext.request(); String absoluteUrl = request.absoluteURI().replace("?_escaped_fragment_=", ""); if(request.path().contains("favicon.ico")){ request.response().setStatusCode(404).end(); }else { final Matcher matcher = patternToGetUrl.matcher(absoluteUrl); String requestedUrl = null; if (matcher.find()) { requestedUrl = matcher.group(1); } final Matcher domainMatcher = patternToGetDomain.matcher(requestedUrl); String requestedDomain = null; if (domainMatcher.find()) { requestedDomain = domainMatcher.group(1); } if(requestedUrl.length()<10){ request.response().setStatusCode(400).end("Request url is not valid: " + requestedUrl); }else { //TODO requestedDomain should be used here... String content = WebRemoteDriver.getContent(BASE_URL, requestedUrl); request.response().putHeader("content-type", "text/html").end(content); } } // Now call the next handler //routingContext.next(); }, ordered); Integer port = 8082; server.requestHandler(router::accept).listen(port); System.out.println("Listening on port " + port); } }
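// --- Illustrative sketch (not part of the original source) ---
// A self-contained check of the two patterns defined above. The sample URLs are
// made up: the outer URL stands for what the rendering server itself receives,
// and the part after its first path slash is the page that should be rendered.
class SeoUrlRegexSketch {
    public static void main(String[] args) {
        java.util.regex.Pattern urlPattern =
                java.util.regex.Pattern.compile("http.\\/\\/.*?\\/(.*)");
        java.util.regex.Pattern domainPattern =
                java.util.regex.Pattern.compile("(http.:\\/\\/.*?\\/)");

        String absoluteUrl = "http://render.example:8082/https://www.example.com/products?id=1";

        java.util.regex.Matcher m = urlPattern.matcher(absoluteUrl);
        String requestedUrl = m.find() ? m.group(1) : null;
        System.out.println(requestedUrl);    // https://www.example.com/products?id=1

        java.util.regex.Matcher d = domainPattern.matcher(requestedUrl);
        String requestedDomain = d.find() ? d.group(1) : null;
        System.out.println(requestedDomain); // https://www.example.com/
    }
}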
package eu.amidst.demos; import COM.hugin.HAPI.ExceptionHugin; import com.google.common.base.Stopwatch; import eu.amidst.core.database.DataBase; import eu.amidst.core.database.DataOnDisk; import eu.amidst.core.database.DataOnMemory; import eu.amidst.core.database.DataOnStream; import eu.amidst.core.database.filereaders.StaticDataOnDiskFromFile; import eu.amidst.core.database.filereaders.arffWekaReader.WekaDataFileReader; import eu.amidst.core.huginlink.ParallelTAN; import eu.amidst.core.models.BayesianNetwork; import eu.amidst.core.models.BayesianNetworkLoader; import eu.amidst.core.models.BayesianNetworkWriter; import eu.amidst.core.utils.BayesianNetworkSampler; import eu.amidst.core.utils.ReservoirSampling; import java.io.IOException; public class ParallelTANDemo { public static void main(String[] args) throws ExceptionHugin, IOException { BayesianNetwork bn = BayesianNetworkLoader.loadFromHugin("networks/Pigs.net"); int sampleSize = 100000; BayesianNetworkSampler sampler = new BayesianNetworkSampler(bn); sampler.setParallelMode(true); sampler.sampleToAnARFFFile("datasets/PigsSample.arff",sampleSize); System.out.println("Number of variables: "+bn.getNumberOfVars()); DataOnStream data = new StaticDataOnDiskFromFile(new WekaDataFileReader("datasets/PigsSample.arff")); ParallelTAN tan= new ParallelTAN(); tan.setNumCores(4); tan.setNumSamplesOnMemory(10000); System.out.println("Learning TAN ..."); BayesianNetwork model = tan.learn(data, "p630400490", "p48124091"); BayesianNetworkWriter.saveToHuginFile(model,"TANFromPigSample.net"); } }
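// --- Illustrative sketch (not part of the original source) ---
// The demo above imports Guava's Stopwatch but never uses it. This standalone
// snippet shows the intended pattern: start a stopwatch, run the expensive step
// (here Thread.sleep stands in for a call like tan.learn(...)), stop it, and
// print the elapsed time. It only needs Guava on the classpath.
class StopwatchTimingSketch {
    public static void main(String[] args) throws InterruptedException {
        com.google.common.base.Stopwatch watch = com.google.common.base.Stopwatch.createStarted();
        Thread.sleep(250); // stand-in for the expensive learning call
        watch.stop();
        System.out.println("Learning took " + watch); // e.g. "Learning took 250.3 ms"
    }
}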
package cz.seznam.euphoria.core.client.dataset; import cz.seznam.euphoria.core.client.functional.UnaryFunction; import cz.seznam.euphoria.core.client.triggers.TimeTrigger; import cz.seznam.euphoria.core.client.triggers.Trigger; import cz.seznam.euphoria.core.client.util.Pair; import cz.seznam.euphoria.core.executor.TriggerScheduler; import java.io.Serializable; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; import static java.util.Objects.requireNonNull; /** * A windowing policy of a dataset. */ public interface Windowing<T, GROUP, LABEL, W extends Window<GROUP, LABEL>> extends Serializable { /** Time windows. */ class Time<T> implements AlignedWindowing<T, Windowing.Time.TimeInterval, Windowing.Time.TimeWindow> { public static final class ProcessingTime<T> implements UnaryFunction<T, Long> { private static final ProcessingTime INSTANCE = new ProcessingTime(); // singleton private ProcessingTime() {} // ~ suppressing the warning is safe due to the returned // object not relying on the generic information in any way @SuppressWarnings("unchecked") public static <T> UnaryFunction<T, Long> get() { return INSTANCE; } @Override public Long apply(T what) { return System.currentTimeMillis(); } } // ~ end of ProcessingTime public static final class TimeInterval implements Serializable { private final long startMillis; private final long intervalMillis; public TimeInterval(long startMillis, long intervalMillis) { this.startMillis = startMillis; this.intervalMillis = intervalMillis; } public long getStartMillis() { return startMillis; } public long getIntervalMillis() { return intervalMillis; } @Override public boolean equals(Object o) { if (o instanceof TimeInterval) { TimeInterval that = (TimeInterval) o; return this.startMillis == that.startMillis && this.intervalMillis == that.intervalMillis; } return false; } @Override public int hashCode() { int result = (int) (startMillis ^ (startMillis >>> 32)); result = 31 * result + (int) (intervalMillis ^ (intervalMillis >>> 32)); return result; } @Override public String toString() { return "TimeInterval{" + "startMillis=" + startMillis + ", intervalMillis=" + intervalMillis + '}'; } } // ~ end of TimeInterval public static class TimeWindow extends EarlyTriggeredWindow implements AlignedWindow<TimeInterval> { private final TimeInterval label; private final long fireStamp; TimeWindow(long startMillis, long intervalMillis, Duration earlyTriggering) { super(earlyTriggering, startMillis + intervalMillis); this.label = new TimeInterval(startMillis, intervalMillis); this.fireStamp = startMillis + intervalMillis; } @Override public TimeInterval getLabel() { return label; } @Override public List<Trigger> createTriggers() { List<Trigger> triggers = new ArrayList<>(1); if (isEarlyTriggered()) { triggers.add(getEarlyTrigger()); } triggers.add(new TimeTrigger(fireStamp)); return triggers; } } // ~ end of TimeWindow // ~ an untyped variant of the Time windowing; does not dependent // on the input elements type public static class UTime<T> extends Time<T> { UTime(long durationMillis, Duration earlyTriggeringPeriod) { super(durationMillis, earlyTriggeringPeriod, ProcessingTime.get()); } /** * Early results will be triggered 
periodically until the window is finally closed. */ @SuppressWarnings("unchecked") public <T> UTime<T> earlyTriggering(After time) { // ~ the unchecked cast is ok: eventTimeFn is // ProcessingTime which is independent of <T> return new UTime<>(super.durationMillis, time.period); } /** * Function that will extract timestamp from data */ public <T> TTime<T> using(UnaryFunction<T, Long> fn) { return new TTime<>(this.durationMillis, earlyTriggeringPeriod, requireNonNull(fn)); } } // ~ a typed variant of the Time windowing; depends on the type of // input elements public static class TTime<T> extends Time<T> { TTime(long durationMillis, Duration earlyTriggeringPeriod, UnaryFunction<T, Long> eventTimeFn) { super(durationMillis, earlyTriggeringPeriod, eventTimeFn); } } // ~ end of EventTimeBased final long durationMillis; final Duration earlyTriggeringPeriod; final UnaryFunction<T, Long> eventTimeFn; public static <T> UTime<T> seconds(long seconds) { return new UTime<>(seconds * 1000, null); } public static <T> UTime<T> minutes(long minutes) { return new UTime<>(minutes * 1000 * 60, null); } public static <T> UTime<T> hours(long hours) { return new UTime<>(hours * 1000 * 60 * 60, null); } public Time(long durationMillis, Duration earlyTriggeringPeriod, UnaryFunction<T, Long> eventTimeFn) { this.durationMillis = durationMillis; this.earlyTriggeringPeriod = earlyTriggeringPeriod; this.eventTimeFn = eventTimeFn; } @Override public Set<TimeWindow> assignWindows(T input) { long ts = eventTimeFn.apply(input); return singleton( new TimeWindow(ts - (ts + durationMillis) % durationMillis, durationMillis, earlyTriggeringPeriod)); } @Override public void updateTriggering(TriggerScheduler triggering, T input) { triggering.updateProcessed(eventTimeFn.apply(input)); } } // ~ end of Time final class Count<T> implements AlignedWindowing<T, Windowing.Count.Counted, Windowing.Count.CountWindow>, MergingWindowing<T, Void, Windowing.Count.Counted, Windowing.Count.CountWindow> { private final int size; private Count(int size) { this.size = size; } public static final class Counted implements Serializable { // ~ no equals/hashCode ... 
every instance is unique } // ~ end of Counted public static class CountWindow implements AlignedWindow<Counted> { private final Counted label = new Counted(); int currentCount; CountWindow(int currentCount) { this.currentCount = currentCount; } @Override public Counted getLabel() { return label; } @Override public List<Trigger> createTriggers() { return Collections.emptyList(); } @Override public String toString() { return "CountWindow { currentCount = " + currentCount + ", label = " + label + " }"; } } // ~ end of CountWindow @Override public Set<CountWindow> assignWindows(T input) { return singleton(new CountWindow(1)); } @Override public Collection<Pair<Collection<CountWindow>, CountWindow>> mergeWindows(Collection<CountWindow> actives) { Iterator<CountWindow> iter = actives.iterator(); CountWindow r = null; while (r == null && iter.hasNext()) { CountWindow w = iter.next(); if (w.currentCount < size) { r = w; } } if (r == null) { return actives.stream() .map(a -> Pair.of((Collection<CountWindow>) singleton(a), a)) .collect(Collectors.toList()); } Set<CountWindow> merged = null; iter = actives.iterator(); while (iter.hasNext()) { CountWindow w = iter.next(); if (r != w && r.currentCount + w.currentCount <= size) { r.currentCount += w.currentCount; if (merged == null) { merged = new HashSet<>(); } merged.add(w); } } if (merged != null && !merged.isEmpty()) { merged.add(r); return singletonList(Pair.of(merged, r)); } return null; } @Override public boolean isComplete(CountWindow window) { return window.currentCount >= size; } public static <T> Count<T> of(int count) { return new Count<>(count); } } // ~ end of Count final class TimeSliding<T> implements AlignedWindowing<T, Long, TimeSliding.SlidingWindow> { public static class SlidingWindow implements AlignedWindow<Long> { private final long startTime; private final long duration; private SlidingWindow(long startTime, long duration) { this.startTime = startTime; this.duration = duration; } @Override public Long getLabel() { return startTime; } @Override public List<Trigger> createTriggers() { return Collections.singletonList(new TimeTrigger(startTime + duration)); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (obj instanceof SlidingWindow) { SlidingWindow other = (SlidingWindow) obj; return other.startTime == startTime; } return false; } @Override public int hashCode() { return (int) (startTime ^ (startTime >>> Integer.SIZE)); } @Override public String toString() { return "SlidingWindow{" + "startTime=" + startTime + ", duration=" + duration + '}'; } } public static <T> TimeSliding<T> of(long duration, long step) { return new TimeSliding<>(duration, step, Time.ProcessingTime.get()); } private final long duration; private final long step; private final int stepsPerWindow; private final UnaryFunction<T, Long> eventTimeFn; private TimeSliding(long duration, long step, UnaryFunction<T, Long> eventTimeFn) { this.duration = duration; this.step = step; this.eventTimeFn = requireNonNull(eventTimeFn); if (duration % step != 0) { throw new IllegalArgumentException( "This time sliding window can manage only aligned sliding windows"); } stepsPerWindow = (int) (duration / step); } /** * Specify the event time extraction function. 
*/ public <T> TimeSliding<T> using(UnaryFunction<T, Long> eventTimeFn) { return new TimeSliding<>(this.duration, this.step, eventTimeFn); } @Override public Set<SlidingWindow> assignWindows(T input) { long now = eventTimeFn.apply(input) - duration + step; long boundary = now / step * step; Set<SlidingWindow> ret = new HashSet<>(); for (int i = 0; i < stepsPerWindow; i++) { ret.add(new SlidingWindow(boundary, duration)); boundary += step; } return ret; } @Override public String toString() { return "TimeSliding{" + "duration=" + duration + ", step=" + step + ", stepsPerWindow=" + stepsPerWindow + '}'; } } // ~ end of TimeSliding /** Session windows. */ final class Session<T, G> implements MergingWindowing<T, G, Session.SessionInterval, Session.SessionWindow<G>> { public static final class SessionInterval implements Serializable, Comparable<SessionInterval> { private final long startMillis; private final long endMillis; public SessionInterval(long startMillis, long endMillis) { this.startMillis = startMillis; this.endMillis = endMillis; } public long getStartMillis() { return startMillis; } public long getEndMillis() { return endMillis; } @Override public boolean equals(Object o) { if (o instanceof SessionInterval) { SessionInterval that = (SessionInterval) o; return this.startMillis == that.startMillis && this.endMillis == that.endMillis; } return false; } @Override public int hashCode() { int result = (int) (startMillis ^ (startMillis >>> 32)); result = 31 * result + (int) (endMillis ^ (endMillis >>> 32)); return result; } boolean intersects(SessionInterval that) { return this.startMillis < that.endMillis && this.endMillis > that.startMillis; } SessionInterval createSpanned(SessionInterval that) { return new SessionInterval( Long.min(this.startMillis, that.startMillis), Long.max(this.endMillis, that.endMillis)); } @Override public int compareTo(SessionInterval that) { if (this.startMillis == that.startMillis) { return (int) (this.endMillis - that.endMillis); } // this.startMillis == that.startMillis captured above already return (int) (this.startMillis - that.startMillis); } @Override public String toString() { return "SessionInterval{" + "startMillis=" + startMillis + ", endMillis=" + endMillis + '}'; } } // ~ end of SessionInterval public static final class SessionWindow<G> extends EarlyTriggeredWindow implements Window<G, SessionInterval> { private final G group; private final SessionInterval label; SessionWindow(G group, SessionInterval label, Duration earlyFiringDuration) { super(earlyFiringDuration, label.getEndMillis()); this.group = group; this.label = label; } @Override public G getGroup() { return group; } @Override public SessionInterval getLabel() { return label; } @Override public List<Trigger> createTriggers() { if (isEarlyTriggered()) { return Arrays.asList( getEarlyTrigger(), new TimeTrigger(label.getEndMillis())); } else { return Collections.singletonList(new TimeTrigger(label.getEndMillis())); } } @Override public String toString() { return "SessionWindow{" + "group=" + group + ", label=" + label + '}'; } } // ~ end of SessionWindow public static final class OfChain { private final long gapMillis; private OfChain(Duration gap) { gapMillis = gap.toMillis(); } public EarlyTriggeringChain earlyTriggering(After time) { return new EarlyTriggeringChain(this, requireNonNull(time)); } public <T, G> Session<T, G> using(UnaryFunction<T, G> groupFn) { return new EarlyTriggeringChain(this, null).using(groupFn); } public <T, G> Session<T, G> using( UnaryFunction<T, G> groupFn, 
UnaryFunction<T, Long> eventFn) { return new EarlyTriggeringChain(this, null).using(groupFn, eventFn); } } // ~ end of OfChain public static class EarlyTriggeringChain { private final OfChain ofChain; private final After earlyTriggering; private EarlyTriggeringChain( OfChain ofChain, After earlyTriggering /* optional */ ) { this.ofChain = requireNonNull(ofChain); this.earlyTriggering = earlyTriggering; } public <T, G> Session<T, G> using(UnaryFunction<T, G> groupFn) { return new Session<>(groupFn, Time.ProcessingTime.get(), this.ofChain.gapMillis, null); } public <T, G> Session<T, G> using( UnaryFunction<T, G> groupFn, UnaryFunction<T, Long> eventFn) { return new Session<>(groupFn, eventFn, this.ofChain.gapMillis, this.earlyTriggering == null ? null : this.earlyTriggering.period); } } // ~ end of EarlyTriggeringChain public static OfChain of(Duration gapDuration) { return new OfChain(gapDuration); } final UnaryFunction<T, G> groupFn; final UnaryFunction<T, Long> eventTimeFn; final long gapDurationMillis; final Duration earlyTriggeringPeriod; private Session( UnaryFunction<T, G> groupFn, UnaryFunction<T, Long> eventTimeFn, long gapDurationMillis, Duration earlyTriggeringPeriod /* optional */) { this.groupFn = requireNonNull(groupFn); this.eventTimeFn = requireNonNull(eventTimeFn); this.gapDurationMillis = gapDurationMillis; this.earlyTriggeringPeriod = earlyTriggeringPeriod; } @Override public Set<SessionWindow<G>> assignWindows(T input) { long evtMillis = this.eventTimeFn.apply(input); SessionWindow<G> w = new SessionWindow<>( this.groupFn.apply(input), new SessionInterval(evtMillis, evtMillis + gapDurationMillis), earlyTriggeringPeriod); return Collections.singleton(w); } @Override public Collection<Pair<Collection<SessionWindow<G>>, SessionWindow<G>>> mergeWindows(Collection<SessionWindow<G>> actives) { if (actives.size() < 2) { return Collections.emptyList(); } ArrayList<SessionWindow<G>> sorted = new ArrayList<>(actives); sorted.sort(Comparator.comparing(SessionWindow<G>::getLabel)); Iterator<SessionWindow<G>> windows = sorted.iterator(); // ~ the final collection of merges to be performed by the framework List<Pair<Collection<SessionWindow<G>>, SessionWindow<G>>> merges = null; // ~ holds the list of existing session windows to be merged List<SessionWindow<G>> toMerge = null; // ~ the current merge candidate SessionWindow<G> mergeCandidate = windows.next(); // ~ true if `mergeCandidate` is a newly created window boolean transientCandidate = false; while (windows.hasNext()) { SessionWindow<G> w = windows.next(); if (mergeCandidate.getLabel().intersects(w.getLabel())) { if (toMerge == null) { toMerge = new ArrayList<>(); } if (!transientCandidate) { toMerge.add(mergeCandidate); } toMerge.add(w); mergeCandidate = new SessionWindow<>( mergeCandidate.getGroup(), mergeCandidate.getLabel().createSpanned(w.getLabel()), earlyTriggeringPeriod); transientCandidate = true; } else { if (toMerge != null && !toMerge.isEmpty()) { if (merges == null) { merges = new ArrayList<>(); } merges.add(Pair.of(toMerge, mergeCandidate)); toMerge = null; } mergeCandidate = w; transientCandidate = false; } } // ~ flush pending state if (toMerge != null) { if (merges == null) { merges = new ArrayList<>(); } merges.add(Pair.of(toMerge, mergeCandidate)); } // ~ deliver results (be sure not to return null) return merges == null ? Collections.emptyList() : merges; } } // ~ end of Session Set<W> assignWindows(T input); /** * Update triggering by given input. 
This is needed so that the windowing * can advance triggering in watermark-based processing schemes driven by event time. */ default void updateTriggering(TriggerScheduler triggering, T input) { triggering.updateProcessed(System.currentTimeMillis()); } }
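/*
 * Usage sketch (illustrative only, not part of the original Windowing source file):
 * how the factory methods defined above are typically combined. The element type
 * `Click` and its fields are hypothetical placeholders, and UnaryFunction is assumed
 * to be usable as a lambda target (a single-abstract-method interface).
 */
import cz.seznam.euphoria.core.client.dataset.Windowing;

class WindowingUsageSketch {

  /** Hypothetical input element used only for this sketch. */
  static final class Click {
    final String user;
    final long timestamp;
    Click(String user, long timestamp) { this.user = user; this.timestamp = timestamp; }
  }

  static void examples() {
    // tumbling one-minute windows in processing time
    Windowing.Time<Click> tumbling = Windowing.Time.minutes(1);

    // tumbling 30-second windows in event time, with the timestamp taken from the element
    Windowing.Time.TTime<Click> eventTime =
        Windowing.Time.<Click>seconds(30).using(c -> c.timestamp);

    // sliding windows 60 s long, started every 15 s
    // (the duration must be a multiple of the step, see the constructor check above)
    Windowing.TimeSliding<Click> sliding = Windowing.TimeSliding.of(60_000, 15_000);

    // count windows that are complete after 100 elements
    Windowing.Count<Click> counted = Windowing.Count.of(100);

    // session windows keyed by user, closed after 30 minutes of inactivity
    Windowing.Session<Click, String> sessions =
        Windowing.Session.of(java.time.Duration.ofMinutes(30))
                         .using(c -> c.user, c -> c.timestamp);
  }
}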
package httpfailover; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.annotation.GuardedBy; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.ResponseHandler; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.params.HttpParams; import org.apache.http.protocol.HttpContext; import org.apache.http.util.EntityUtils; import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.util.Iterator; import java.util.List; /** * Extends {@link DefaultHttpClient} adding methods that allow retrying the same request * on multiple hosts. * * The retry logic is controlled by a {@link FailoverRetryHandler}. * If no handler is provided, {@link DefaultFailoverRetryHandler} is used. * * Please note that a request on a host may be retried multiple times, before hopping * to the next host in the list. This behaviour is controlled by the {@link org.apache.http.client.HttpRequestRetryHandler} * configured in {@link DefaultHttpClient}. */ public class FailoverHttpClient extends DefaultHttpClient { private final Log log = LogFactory.getLog(getClass()); /** the multi-target retry handler **/ @GuardedBy("this") private FailoverRetryHandler multiTargetRetryHandler = null; /** * Creates a new HTTP client from parameters and a connection manager. * * @param params the parameters * @param conman the connection manager */ public FailoverHttpClient( final ClientConnectionManager conman, final HttpParams params) { super(conman, params); } public FailoverHttpClient( final ClientConnectionManager conman) { super(conman, null); } public FailoverHttpClient(final HttpParams params) { super(null, params); } public FailoverHttpClient() { super(null, null); } public synchronized FailoverRetryHandler getMultiTargetRetryHandler() { if (multiTargetRetryHandler == null) { multiTargetRetryHandler = new DefaultFailoverRetryHandler(); } return multiTargetRetryHandler; } /** * Set a handler for determining if an HttpRequest should fail over to a different host. * @param handler the handler */ public synchronized void setMultiTargetRetryHandler(FailoverRetryHandler handler) { this.multiTargetRetryHandler = handler; } /** * Tries to execute the request on all targets. * Each target failure is evaluated using the multiTargetRetryHandler. * * In case of non-retriable failure, the last exception is thrown. * * @param targets the candidate target hosts for the request. * The request is executed on each host until one succeeds. * @param request the request to execute * @return the response of the first successful execution * @throws IOException in case of a problem or the connection was aborted * @throws ClientProtocolException in case of an http protocol error */ public HttpResponse execute(List<HttpHost> targets, HttpRequest request) throws IOException, ClientProtocolException { return execute(targets, request, (HttpContext) null); } /** * Tries to execute the request on all targets. * Each target failure is evaluated using the multiTargetRetryHandler. * * In case of non-retriable failure, the last exception is thrown. * * @param targets the candidate target hosts for the request. * The request is executed on each host until one succeeds.
* @param request the request to execute * @param context the request-specific execution context, * or <code>null</code> to use a default context * @return the response of the first successful execution * @throws IOException in case of a problem or the connection was aborted * @throws ClientProtocolException in case of an http protocol error */ public HttpResponse execute(List<HttpHost> targets, HttpRequest request, HttpContext context) throws IOException, ClientProtocolException { FailoverRetryHandler retryHandler = getMultiTargetRetryHandler(); int executionCount = 1; while(true) { try { return executeMulti(targets, request, context); } catch(IOException ex) { if (executionCount >= retryHandler.getRetryCount()) { throw ex; } logRetry(ex); } executionCount++; } } /** * Executes a request using the default context and processes the * response using the given response handler. All target hosts are tried until one succeeds. * * @param targets the candidate target hosts for the request. * The request is executed on each host until one succeeds. * @param request the request to execute * @param responseHandler the response handler * * @return the response object as generated by the response handler. * @throws IOException in case of a problem or the connection was aborted * @throws ClientProtocolException in case of an http protocol error */ public <T> T execute(List<HttpHost> targets, HttpRequest request, ResponseHandler<? extends T> responseHandler) throws IOException, ClientProtocolException { return execute(targets, request, responseHandler, null); } /** * Executes a request using the default context and processes the * response using the given response handler. * * @param targets the candidate target hosts for the request. * The request is executed on each host until one succeeds. * @param request the request to execute * @param responseHandler the response handler * @param context the context to use for the execution, or * <code>null</code> to use the default context * * @return the response object as generated by the response handler. * @throws IOException in case of a problem or the connection was aborted * @throws ClientProtocolException in case of an http protocol error */ public <T> T execute(List<HttpHost> targets, HttpRequest request, ResponseHandler<? extends T> responseHandler, HttpContext context) throws IOException, ClientProtocolException { HttpResponse response = execute(targets, request, context); T result; try { result = responseHandler.handleResponse(response); } catch (Exception t) { HttpEntity entity = response.getEntity(); try { EntityUtils.consume(entity); } catch (Exception t2) { // Log this exception. The original exception is more // important and will be thrown to the caller. this.log.warn("Error consuming content after an exception.", t2); } if (t instanceof RuntimeException) { throw (RuntimeException) t; } if (t instanceof IOException) { throw (IOException) t; } throw new UndeclaredThrowableException(t); } // Handling the response was successful. Ensure that the content has // been fully consumed.
HttpEntity entity = response.getEntity(); EntityUtils.consumeQuietly(entity); return result; } private HttpResponse executeMulti(List<HttpHost> targets, HttpRequest request, HttpContext context) throws IOException, ClientProtocolException { if (context == null) { context = createHttpContext(); } if (targets == null || targets.size() == 0) { throw new IllegalArgumentException("targets parameter may not be null or empty"); } Iterator<HttpHost> iterator = targets.iterator(); FailoverRetryHandler retryHandler = getMultiTargetRetryHandler(); // note that at the last item in the iterator // the loop terminates either with return or throw while (true) { try { return execute(iterator.next(), request, context); } catch(IOException ex) { if(!iterator.hasNext() || !retryHandler.tryNextHost(ex, context)) { throw ex; } logRetry(ex); } } } private void logRetry(IOException ex) { if (this.log.isWarnEnabled()) { this.log.warn("I/O exception ("+ ex.getClass().getName() + ") caught when processing request: " + ex.getMessage()); } if (this.log.isDebugEnabled()) { this.log.debug(ex.getMessage(), ex); } this.log.info("Trying request on next target"); } }
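/*
 * Usage sketch (illustrative, not part of the original source): executing the same GET
 * request against a list of candidate hosts until one of them succeeds. The host names
 * and the request path are placeholders.
 */
import java.util.Arrays;
import java.util.List;
import org.apache.http.HttpHost;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.BasicResponseHandler;
import httpfailover.FailoverHttpClient;

class FailoverHttpClientUsageSketch {
  public static void main(String[] args) throws Exception {
    FailoverHttpClient client = new FailoverHttpClient();
    List<HttpHost> targets = Arrays.asList(
        new HttpHost("primary.example.com", 8080),
        new HttpHost("secondary.example.com", 8080));
    // An IOException on one host is passed to the FailoverRetryHandler; if it allows it,
    // the same request is retried on the next host in the list.
    String body = client.execute(targets, new HttpGet("/status"), new BasicResponseHandler());
    System.out.println(body);
    client.getConnectionManager().shutdown();
  }
}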
// @java.file.header package org.gridgain.examples.datagrid.store; import org.gridgain.examples.datagrid.store.jdbc.*; import org.gridgain.grid.*; import org.gridgain.grid.cache.*; import org.gridgain.grid.spi.discovery.tcp.*; import org.gridgain.grid.spi.discovery.tcp.ipfinder.multicast.*; import org.gridgain.grid.spi.discovery.tcp.ipfinder.vm.*; import java.util.*; import static org.gridgain.grid.cache.GridCacheAtomicityMode.*; /** * Starts up an empty node with example cache configuration. * * @author @java.author * @version @java.version */ public class CacheNodeWithStoreStartup { /** * Start up an empty node with specified cache configuration. * * @param args Command line arguments, none required. * @throws GridException If example execution failed. */ public static void main(String[] args) throws GridException { GridGain.start(configure()); } /** * Configure grid. * * @return Grid configuration. * @throws GridException If failed. */ public static GridConfiguration configure() throws GridException { GridConfiguration cfg = new GridConfiguration(); GridTcpDiscoverySpi discoSpi = new GridTcpDiscoverySpi(); GridTcpDiscoveryVmIpFinder ipFinder = new GridTcpDiscoveryMulticastIpFinder(); Collection<String> addrs = new ArrayList<>(); String addr = "127.0.0.1"; int port = 47500; for (int i = 0; i < 10; i++) addrs.add(addr + ':' + port++); ipFinder.setAddresses(addrs); discoSpi.setIpFinder(ipFinder); GridCacheConfiguration cacheCfg = new GridCacheConfiguration(); cacheCfg.setAtomicityMode(TRANSACTIONAL); // cacheCfg.setStore(new CacheDummyPersonStore()); cacheCfg.setStore(new CacheJdbcPersonStore()); // cacheCfg.setStore(new CacheHibernatePersonStore()); cfg.setDiscoverySpi(discoSpi); cfg.setCacheConfiguration(cacheCfg); return cfg; } }
package ie.tcd.slscs.tools; import ie.tcd.slscs.bundles.Utils; import ie.tcd.slscs.ngramtool.NGram; import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; import java.util.regex.Matcher; import java.io.*; import java.nio.charset.Charset; public class AKCorpusFile { public String filename; public String author; public String language; public String year; public String field; List<String> text; AKCorpusFile() { text = new ArrayList<String>(); } AKCorpusFile(String filename, String author, String year) { this(); this.filename = filename; this.author = author.toLowerCase(); this.year = year; String[] tmp = filename.split("\\/"); this.field = tmp[0]; this.language = tmp[1]; } AKCorpusFile(String[] a) { this(a[0], a[1], a[2]); } public void read(String path) throws IOException { String line; String fullfile = path + filename; InputStream fis = new FileInputStream(fullfile); InputStreamReader isr = new InputStreamReader(fis, Charset.forName("UTF-8")); BufferedReader br = new BufferedReader(isr); try { while ((line = br.readLine()) != null) { text.add(line); } } finally { br.close(); } } public List<String> getText() { return text; } public void clearText() { text.clear(); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(filename); sb.append("\t"); sb.append(author); sb.append("\t"); sb.append(language); sb.append("\t"); sb.append(year); sb.append("\t"); sb.append(field); return sb.toString(); } }
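/*
 * Usage sketch (illustrative, not part of the original source). It is declared in the
 * same package because the AKCorpusFile constructors are package-private. The metadata
 * line, the corpus root directory and the file layout are hypothetical examples.
 */
package ie.tcd.slscs.tools;

import java.io.IOException;

class AKCorpusFileUsageSketch {
  public static void main(String[] args) throws IOException {
    // expected metadata: filename<TAB>author<TAB>year, where the filename
    // starts with "<field>/<language>/..."
    String[] meta = "physics/ga/example.txt\tKane\t1925".split("\t");
    AKCorpusFile f = new AKCorpusFile(meta);
    // read() concatenates path + filename, so the corpus root needs a trailing separator
    f.read("/data/ak-corpus/");
    System.out.println(f);                              // tab-separated metadata
    System.out.println(f.getText().size() + " lines loaded");
    f.clearText();
  }
}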
package com.puppycrawl.tools.checkstyle; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.io.OutputStream; import java.io.PrintStream; import junit.framework.TestCase; import org.apache.regexp.RESyntaxException; public class CheckerTest extends TestCase { /** a brief logger that only display info about errors */ protected static class BriefLogger extends DefaultLogger { public BriefLogger(OutputStream out) { super(out, true); } public void auditStarted(AuditEvent evt) {} public void fileFinished(AuditEvent evt) {} public void fileStarted(AuditEvent evt) {} } private final ByteArrayOutputStream mBAOS = new ByteArrayOutputStream(); private final PrintStream mStream = new PrintStream(mBAOS); private final Configuration mConfig = new Configuration(); public CheckerTest(String name) { super(name); } protected void setUp() throws Exception { mConfig.setHeaderFile(getPath("java.header")); mConfig.setLeftCurlyOptionProperty(Defn.LCURLY_METHOD_PROP, LeftCurlyOption.NL); mConfig.setLeftCurlyOptionProperty(Defn.LCURLY_OTHER_PROP, LeftCurlyOption.NLOW); mConfig.setLeftCurlyOptionProperty(Defn.LCURLY_TYPE_PROP, LeftCurlyOption.NL); mConfig.setRCurly(RightCurlyOption.ALONE); } static String getPath(String aFilename) throws IOException { final File f = new File(System.getProperty("tests.dir"), aFilename); return f.getCanonicalPath(); } protected Checker createChecker() throws RESyntaxException { final AuditListener listener = new BriefLogger(mStream); final Checker c = new Checker(mConfig); c.addListener(listener); return c; } private void verify(Checker aC, String aFilename, String[] aExpected) throws Exception { mStream.flush(); final int errs = aC.process(new String[] {aFilename}); // process each of the lines final ByteArrayInputStream bais = new ByteArrayInputStream(mBAOS.toByteArray()); final LineNumberReader lnr = new LineNumberReader(new InputStreamReader(bais)); for (int i = 0; i < aExpected.length; i++) { assertEquals(aExpected[i], lnr.readLine()); } assertEquals(aExpected.length, errs); aC.destroy(); } public void testWhitespace() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_CAST_WHITESPACE_PROP, false); mConfig.setParenPadOption(PadOption.NOSPACE); mConfig.setBlockOptionProperty(Defn.TRY_BLOCK_PROP, BlockOption.IGNORE); mConfig.setBlockOptionProperty(Defn.CATCH_BLOCK_PROP, BlockOption.IGNORE); final Checker c = createChecker(); final String filepath = getPath("InputWhitespace.java"); assertNotNull(c); final String[] expected = { filepath + ":5:12: '.' is preceeded with whitespace.", filepath + ":5:14: '.' 
is followed by whitespace.", filepath + ":13: type Javadoc comment is missing an @author tag.", filepath + ":16:22: '=' is not preceeded with whitespace.", filepath + ":16:23: '=' is not followed by whitespace.", filepath + ":18:24: '=' is not followed by whitespace.", filepath + ":26:14: '=' is not preceeded with whitespace.", filepath + ":27:10: '=' is not preceeded with whitespace.", filepath + ":27:11: '=' is not followed by whitespace.", filepath + ":28:10: '+=' is not preceeded with whitespace.", filepath + ":28:12: '+=' is not followed by whitespace.", filepath + ":29:13: '-=' is not followed by whitespace.", filepath + ":29:14: '-' is followed by whitespace.", filepath + ":29:21: '+' is followed by whitespace.", filepath + ":30:14: '++' is preceeded with whitespace.", filepath + ":30:21: '--' is preceeded with whitespace.", filepath + ":31:15: '++' is followed by whitespace.", filepath + ":31:22: '--' is followed by whitespace.", filepath + ":37:21: 'synchronized' is not followed by whitespace.", filepath + ":39:12: 'try' is not followed by whitespace.", filepath + ":39:12: '{' is not preceeded with whitespace.", filepath + ":41:14: 'catch' is not followed by whitespace.", filepath + ":41:34: '{' is not preceeded with whitespace.", filepath + ":58:11: 'if' is not followed by whitespace.", filepath + ":58:12: '(' is followed by whitespace.", filepath + ":58:36: ')' is preceeded by whitespace.", filepath + ":59:9: '{' should be on the previous line.", filepath + ":63:9: '{' should be on the previous line.", filepath + ":74:13: '(' is followed by whitespace.", filepath + ":74:18: ')' is preceeded by whitespace.", filepath + ":75:9: '{' should be on the previous line.", filepath + ":76:19: 'return' is not followed by whitespace.", filepath + ":79:9: '{' should be on the previous line.", filepath + ":88:21: cast needs to be followed by whitespace.", filepath + ":97:29: '?' is not preceeded with whitespace.", filepath + ":97:30: '?' is not followed by whitespace.", filepath + ":97:34: ':' is not preceeded with whitespace.", filepath + ":97:35: ':' is not followed by whitespace.", filepath + ":98:15: '==' is not preceeded with whitespace.", filepath + ":98:17: '==' is not followed by whitespace.", filepath + ":104:20: '*' is not followed by whitespace.", filepath + ":104:21: '*' is not preceeded with whitespace.", filepath + ":111:22: '!' is followed by whitespace.", filepath + ":112:23: '~' is followed by whitespace.", filepath + ":119:18: '%' is not preceeded with whitespace.", filepath + ":120:20: '%' is not followed by whitespace.", filepath + ":121:18: '%' is not preceeded with whitespace.", filepath + ":121:19: '%' is not followed by whitespace.", filepath + ":123:18: '/' is not preceeded with whitespace.", filepath + ":124:20: '/' is not followed by whitespace.", filepath + ":125:18: '/' is not preceeded with whitespace.", filepath + ":125:19: '/' is not followed by whitespace.", filepath + ":129:17: '.' is preceeded with whitespace.", filepath + ":129:24: '.' is followed by whitespace.", filepath + ":136:10: '.' is preceeded with whitespace.", filepath + ":136:12: '.' 
is followed by whitespace.", filepath + ":153:15: 'assert' is not followed by whitespace.", filepath + ":156:20: ':' is not preceeded with whitespace.", filepath + ":156:21: ':' is not followed by whitespace.", }; verify(c, filepath, expected); c.destroy(); } public void testWhitespaceCastParenOff() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_CAST_WHITESPACE_PROP, true); mConfig.setParenPadOption(PadOption.IGNORE); mConfig.setBlockOptionProperty(Defn.TRY_BLOCK_PROP, BlockOption.IGNORE); mConfig.setBlockOptionProperty(Defn.CATCH_BLOCK_PROP, BlockOption.IGNORE); final Checker c = createChecker(); final String filepath = getPath("InputWhitespace.java"); assertNotNull(c); final String[] expected = { filepath + ":5:12: '.' is preceeded with whitespace.", filepath + ":5:14: '.' is followed by whitespace.", filepath + ":13: type Javadoc comment is missing an @author tag.", filepath + ":16:22: '=' is not preceeded with whitespace.", filepath + ":16:23: '=' is not followed by whitespace.", filepath + ":18:24: '=' is not followed by whitespace.", filepath + ":26:14: '=' is not preceeded with whitespace.", filepath + ":27:10: '=' is not preceeded with whitespace.", filepath + ":27:11: '=' is not followed by whitespace.", filepath + ":28:10: '+=' is not preceeded with whitespace.", filepath + ":28:12: '+=' is not followed by whitespace.", filepath + ":29:13: '-=' is not followed by whitespace.", filepath + ":29:14: '-' is followed by whitespace.", filepath + ":29:21: '+' is followed by whitespace.", filepath + ":30:14: '++' is preceeded with whitespace.", filepath + ":30:21: '--' is preceeded with whitespace.", filepath + ":31:15: '++' is followed by whitespace.", filepath + ":31:22: '--' is followed by whitespace.", filepath + ":37:21: 'synchronized' is not followed by whitespace.", filepath + ":39:12: 'try' is not followed by whitespace.", filepath + ":39:12: '{' is not preceeded with whitespace.", filepath + ":41:14: 'catch' is not followed by whitespace.", filepath + ":41:34: '{' is not preceeded with whitespace.", filepath + ":58:11: 'if' is not followed by whitespace.", filepath + ":59:9: '{' should be on the previous line.", filepath + ":63:9: '{' should be on the previous line.", filepath + ":75:9: '{' should be on the previous line.", filepath + ":76:19: 'return' is not followed by whitespace.", filepath + ":79:9: '{' should be on the previous line.", filepath + ":97:29: '?' is not preceeded with whitespace.", filepath + ":97:30: '?' is not followed by whitespace.", filepath + ":97:34: ':' is not preceeded with whitespace.", filepath + ":97:35: ':' is not followed by whitespace.", filepath + ":98:15: '==' is not preceeded with whitespace.", filepath + ":98:17: '==' is not followed by whitespace.", filepath + ":104:20: '*' is not followed by whitespace.", filepath + ":104:21: '*' is not preceeded with whitespace.", filepath + ":111:22: '!' is followed by whitespace.", filepath + ":112:23: '~' is followed by whitespace.", filepath + ":119:18: '%' is not preceeded with whitespace.", filepath + ":120:20: '%' is not followed by whitespace.", filepath + ":121:18: '%' is not preceeded with whitespace.", filepath + ":121:19: '%' is not followed by whitespace.", filepath + ":123:18: '/' is not preceeded with whitespace.", filepath + ":124:20: '/' is not followed by whitespace.", filepath + ":125:18: '/' is not preceeded with whitespace.", filepath + ":125:19: '/' is not followed by whitespace.", filepath + ":129:17: '.' is preceeded with whitespace.", filepath + ":129:24: '.' 
is followed by whitespace.", filepath + ":136:10: '.' is preceeded with whitespace.", filepath + ":136:12: '.' is followed by whitespace.", filepath + ":153:15: 'assert' is not followed by whitespace.", filepath + ":156:20: ':' is not preceeded with whitespace.", filepath + ":156:21: ':' is not followed by whitespace.", }; verify(c, filepath, expected); } public void testWhitespaceOff() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setBlockOptionProperty(Defn.TRY_BLOCK_PROP, BlockOption.IGNORE); mConfig.setBlockOptionProperty(Defn.CATCH_BLOCK_PROP, BlockOption.IGNORE); final Checker c = createChecker(); final String filepath = getPath("InputWhitespace.java"); assertNotNull(c); final String[] expected = { filepath + ":13: type Javadoc comment is missing an @author tag.", filepath + ":59:9: '{' should be on the previous line.", filepath + ":63:9: '{' should be on the previous line.", filepath + ":75:9: '{' should be on the previous line.", filepath + ":79:9: '{' should be on the previous line.", }; verify(c, filepath, expected); } public void testBraces() throws Exception { final Checker c = createChecker(); final String filepath = getPath("InputBraces.java"); final String[] expected = { filepath + ":29: 'do' construct must use '{}'s.", filepath + ":41: 'while' construct must use '{}'s.", filepath + ":41:14: 'while' is not followed by whitespace.", filepath + ":42: 'while' construct must use '{}'s.", filepath + ":44: 'while' construct must use '{}'s.", filepath + ":45: 'if' construct must use '{}'s.", filepath + ":58: 'for' construct must use '{}'s.", filepath + ":58:12: 'for' is not followed by whitespace.", filepath + ":58:23: ';' needs to be followed by whitespace.", filepath + ":58:29: ';' needs to be followed by whitespace.", filepath + ":59: 'for' construct must use '{}'s.", filepath + ":61: 'for' construct must use '{}'s.", filepath + ":63: 'if' construct must use '{}'s.", filepath + ":82: 'if' construct must use '{}'s.", filepath + ":83: 'if' construct must use '{}'s.", filepath + ":85: 'if' construct must use '{}'s.", filepath + ":87: 'else' construct must use '{}'s.", filepath + ":89: 'if' construct must use '{}'s.", filepath + ":97: 'else' construct must use '{}'s.", filepath + ":99: 'if' construct must use '{}'s.", filepath + ":100: 'if' construct must use '{}'s." 
}; verify(c, filepath, expected); } public void testBracesOff() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_BRACES_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputBraces.java"); final String[] expected = { filepath + ":41:14: 'while' is not followed by whitespace.", filepath + ":58:12: 'for' is not followed by whitespace.", filepath + ":58:23: ';' needs to be followed by whitespace.", filepath + ":58:29: ';' needs to be followed by whitespace.", }; verify(c, filepath, expected); } public void testTags() throws Exception { final Checker c = createChecker(); final String filepath = getPath("InputTags.java"); assertNotNull(c); final String[] expected = { filepath + ":8: type is missing a Javadoc comment.", filepath + ":11:17: variable 'mMissingJavadoc' missing Javadoc.", filepath + ":14:5: method is missing a Javadoc comment.", filepath + ":18: Unused @param tag for 'unused'.", filepath + ":24: Expected an @return tag.", filepath + ":33: Expected an @return tag.", filepath + ":40:16: Expected @throws tag for 'Exception'.", filepath + ":49:16: Expected @throws tag for 'Exception'.", filepath + ":53: Unused @throws tag for 'WrongException'.", filepath + ":55:16: Expected @throws tag for 'Exception'.", filepath + ":55:27: Expected @throws tag for 'NullPointerException'.", filepath + ":60:22: Expected @param tag for 'aOne'.", filepath + ":68:22: Expected @param tag for 'aOne'.", filepath + ":72: Unused @param tag for 'WrongParam'.", filepath + ":73:23: Expected @param tag for 'aOne'.", filepath + ":73:33: Expected @param tag for 'aTwo'.", filepath + ":78: Unused @param tag for 'Unneeded'.", filepath + ":79: Unused Javadoc tag.", filepath + ":87: Duplicate @return tag.", filepath + ":109:23: Expected @param tag for 'aOne'.", filepath + ":109:55: Expected @param tag for 'aFour'.", filepath + ":109:66: Expected @param tag for 'aFive'.", filepath + ":129:5: '{' should be on the previous line.", }; verify(c, filepath, expected); } public void testInner() throws Exception { final Checker c = createChecker(); final String filepath = getPath("InputInner.java"); assertNotNull(c); final String[] expected = { filepath + ":14: type is missing a Javadoc comment.", filepath + ":17:20: variable 'fData' missing Javadoc.", filepath + ":21: type is missing a Javadoc comment.", filepath + ":24:16: variable 'data' missing Javadoc.", filepath + ":24:16: variable 'data' must match pattern '^[A-Z](_?[A-Z0-9]+)*$'.", filepath + ":27: type is missing a Javadoc comment.", filepath + ":30:24: variable 'rData' missing Javadoc.", filepath + ":30:24: variable 'rData' must be private and have accessor methods.", filepath + ":33:27: variable 'protectedVariable' must be private and have accessor methods.", filepath + ":36:17: variable 'packageVariable' must be private and have accessor methods.", filepath + ":41:29: variable 'sWeird' must be private and have accessor methods.", filepath + ":43:19: variable 'sWeird2' must be private and have accessor methods.", }; verify(c, filepath, expected); } public void testIgnoreAccess() throws Exception { mConfig.setPatternProperty(Defn.PUBLIC_MEMBER_PATTERN_PROP, "^r[A-Z]"); mConfig.setBooleanProperty(Defn.ALLOW_PROTECTED_PROP, true); mConfig.setBooleanProperty(Defn.ALLOW_PACKAGE_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputInner.java"); assertNotNull(c); final String[] expected = { filepath + ":14: type is missing a Javadoc comment.", filepath + ":17:20: variable 'fData' missing Javadoc.", 
filepath + ":17:20: variable 'fData' must be private and have accessor methods.", filepath + ":21: type is missing a Javadoc comment.", filepath + ":24:16: variable 'data' missing Javadoc.", filepath + ":24:16: variable 'data' must match pattern '^[A-Z](_?[A-Z0-9]+)*$'.", filepath + ":27: type is missing a Javadoc comment.", filepath + ":30:24: variable 'rData' missing Javadoc.", }; verify(c, filepath, expected); } public void testSimple() throws Exception { mConfig.setIntProperty(Defn.MAX_FILE_LENGTH_PROP, 20) ; mConfig.setIntProperty(Defn.MAX_METHOD_LENGTH_PROP, 19) ; mConfig.setIntProperty(Defn.MAX_CONSTRUCTOR_LENGTH_PROP, 9) ; mConfig.setPatternProperty(Defn.PARAMETER_PATTERN_PROP, "^a[A-Z][a-zA-Z0-9]*$"); mConfig.setPatternProperty(Defn.STATIC_PATTERN_PROP, "^s[A-Z][a-zA-Z0-9]*$"); mConfig.setPatternProperty(Defn.MEMBER_PATTERN_PROP, "^m[A-Z][a-zA-Z0-9]*$"); mConfig.setPatternProperty(Defn.IGNORE_LINE_LENGTH_PATTERN_PROP,"^.*is OK.*regexp.*$"); mConfig.setPatternProperty(Defn.TODO_PATTERN_PROP, "FIXME:"); final Checker c = createChecker(); final String filepath = getPath("InputSimple.java"); assertNotNull(c); final String[] expected = { filepath + ":1: file length is 190 lines (max allowed is 20).", filepath + ":3: Line does not match expected header line of '// Created: 2001'.", filepath + ":18: line longer than 80 characters", filepath + ":19:25: line contains a tab character", filepath + ":25:29: variable 'badConstant' must match pattern '^[A-Z](_?[A-Z0-9]+)*$'.", filepath + ":30:24: variable 'badStatic' must match pattern '^s[A-Z][a-zA-Z0-9]*$'.", filepath + ":35:17: variable 'badMember' must match pattern '^m[A-Z][a-zA-Z0-9]*$'.", filepath + ":39:19: variable 'mNumCreated2' must be private and have accessor methods.", filepath + ":42:40: ',' needs to be followed by whitespace.", filepath + ":49:23: variable 'sTest1' must be private and have accessor methods.", filepath + ":51:26: variable 'sTest3' must be private and have accessor methods.", filepath + ":53:16: variable 'sTest2' must be private and have accessor methods.", filepath + ":56:9: variable 'mTest1' must be private and have accessor methods.", filepath + ":58:16: variable 'mTest2' must be private and have accessor methods.", filepath + ":71:19: parameter 'badFormat1' must match pattern '^a[A-Z][a-zA-Z0-9]*$'.", filepath + ":71:30: ',' needs to be followed by whitespace.", filepath + ":71:34: parameter 'badFormat2' must match pattern '^a[A-Z][a-zA-Z0-9]*$'.", filepath + ":72:25: parameter 'badFormat3' must match pattern '^a[A-Z][a-zA-Z0-9]*$'.", filepath + ":80: method length is 20 lines (max allowed is 19).", filepath + ":103: constructor length is 10 lines (max allowed is 9).", filepath + ":119:13: variable 'ABC' must match pattern '^[a-z][a-zA-Z0-9]*$'.", filepath + ":123:19: variable 'CDE' must match pattern '^[a-z][a-zA-Z0-9]*$'.", filepath + ":127:9: '{' should be on the previous line.", filepath + ":130:18: variable 'I' must match pattern '^[a-z][a-zA-Z0-9]*$'.", filepath + ":131:9: '{' should be on the previous line.", filepath + ":132:20: variable 'InnerBlockVariable' must match pattern '^[a-z][a-zA-Z0-9]*$'.", filepath + ":137:10: method name 'ALL_UPPERCASE_METHOD' must match pattern '^[a-z][a-zA-Z0-9]*$'.", filepath + ":142:30: variable 'BAD__NAME' must match pattern '^[A-Z](_?[A-Z0-9]+)*$'.", filepath + ":145: line longer than 80 characters", filepath + ":145:35: line contains a tab character", filepath + ":146:64: line contains a tab character", filepath + ":153:27: '=' is not followed by whitespace.", 
filepath + ":154:9: line contains a tab character", filepath + ":154:27: '=' is not followed by whitespace.", filepath + ":155:10: line contains a tab character", filepath + ":155:27: '=' is not followed by whitespace.", filepath + ":156:1: line contains a tab character", filepath + ":156:27: '=' is not followed by whitespace.", filepath + ":157:3: line contains a tab character", filepath + ":157:27: '=' is not followed by whitespace.", filepath + ":158:3: line contains a tab character", filepath + ":158:27: '=' is not followed by whitespace.", filepath + ":161: Comment matches to-do format 'FIXME:'.", filepath + ":162: Comment matches to-do format 'FIXME:'.", filepath + ":163: Comment matches to-do format 'FIXME:'.", filepath + ":167: Comment matches to-do format 'FIXME:'.", }; verify(c, filepath, expected); } public void testModifierChecks() throws Exception { final Checker c = createChecker(); final String filepath = getPath("InputModifier.java"); assertNotNull(c); final String[] expected = { filepath + ":14:10: 'final' modifier out of order with the JLS suggestions.", filepath + ":18:12: 'private' modifier out of order with the JLS suggestions.", filepath + ":24:14: 'private' modifier out of order with the JLS suggestions.", filepath + ":32:9: redundant 'public' modifier.", filepath + ":38:9: redundant 'abstract' modifier.", }; verify(c, filepath, expected); } public void testStrictJavadoc() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputPublicOnly.java"); assertNotNull(c); final String[] expected = { filepath + ":7: type is missing a Javadoc comment.", filepath + ":9: type is missing a Javadoc comment.", filepath + ":11:16: variable 'CONST' missing Javadoc.", filepath + ":12:9: method is missing a Javadoc comment.", filepath + ":14: type is missing a Javadoc comment.", filepath + ":16:25: variable 'mData' missing Javadoc.", filepath + ":18:13: method is missing a Javadoc comment.", filepath + ":25:13: method is missing a Javadoc comment.", filepath + ":34: type is missing a Javadoc comment.", filepath + ":36:21: variable 'mDiff' missing Javadoc.", filepath + ":38:9: method is missing a Javadoc comment.", filepath + ":43:17: variable 'mSize' missing Javadoc.", filepath + ":44:9: variable 'mLen' missing Javadoc.", filepath + ":44:9: variable 'mLen' must be private and have accessor methods.", filepath + ":45:19: variable 'mDeer' missing Javadoc.", filepath + ":45:19: variable 'mDeer' must be private and have accessor methods.", filepath + ":46:16: variable 'aFreddo' missing Javadoc.", filepath + ":46:16: variable 'aFreddo' must be private and have accessor methods.", filepath + ":49:5: method is missing a Javadoc comment.", filepath + ":54:5: method is missing a Javadoc comment.", filepath + ":59:5: method is missing a Javadoc comment.", filepath + ":64:5: method is missing a Javadoc comment.", filepath + ":69:5: method is missing a Javadoc comment.", filepath + ":74:5: method is missing a Javadoc comment.", filepath + ":79:5: method is missing a Javadoc comment.", filepath + ":84:5: method is missing a Javadoc comment.", filepath + ":94:32: Expected @param tag for 'aA'." 
}; verify(c, filepath, expected); } public void testNoJavadoc() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setJavadocScope(Scope.NOTHING); final Checker c = createChecker(); final String filepath = getPath("InputPublicOnly.java"); assertNotNull(c); final String[] expected = { filepath + ":44:9: variable 'mLen' must be private and have accessor methods.", filepath + ":45:19: variable 'mDeer' must be private and have accessor methods.", filepath + ":46:16: variable 'aFreddo' must be private and have accessor methods.", }; verify(c, filepath, expected); } // pre 1.4 relaxed mode is roughly equivalent with check=protected public void testRelaxedJavadoc() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setJavadocScope(Scope.PROTECTED); final Checker c = createChecker(); final String filepath = getPath("InputPublicOnly.java"); assertNotNull(c); final String[] expected = { filepath + ":7: type is missing a Javadoc comment.", filepath + ":44:9: variable 'mLen' must be private and have accessor methods.", filepath + ":45:19: variable 'mDeer' missing Javadoc.", filepath + ":45:19: variable 'mDeer' must be private and have accessor methods.", filepath + ":46:16: variable 'aFreddo' missing Javadoc.", filepath + ":46:16: variable 'aFreddo' must be private and have accessor methods.", filepath + ":59:5: method is missing a Javadoc comment.", filepath + ":64:5: method is missing a Javadoc comment.", filepath + ":79:5: method is missing a Javadoc comment.", filepath + ":84:5: method is missing a Javadoc comment." }; verify(c, filepath, expected); } public void testScopeInnerInterfacesPublic() throws Exception { mConfig.setJavadocScope(Scope.PUBLIC); mConfig.setBooleanProperty(Defn.IGNORE_PUBLIC_IN_INTERFACE_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputScopeInnerInterfaces.java"); assertNotNull(c); final String[] expected = { filepath + ":7: type is missing a Javadoc comment.", filepath + ":38: type is missing a Javadoc comment.", filepath + ":40:23: variable 'CA' missing Javadoc.", filepath + ":41:16: variable 'CB' missing Javadoc.", filepath + ":43:9: method is missing a Javadoc comment.", filepath + ":44:9: method is missing a Javadoc comment." }; verify(c, filepath, expected); } public void testScopeInnerClassesPackage() throws Exception { mConfig.setJavadocScope(Scope.getInstance("package")); final Checker c = createChecker(); final String filepath = getPath("InputScopeInnerClasses.java"); assertNotNull(c); final String[] expected = { filepath + ":18: type is missing a Javadoc comment.", filepath + ":20: type is missing a Javadoc comment.", filepath + ":22: type is missing a Javadoc comment." 
}; verify(c, filepath, expected); } public void testScopeInnerClassesPublic() throws Exception { mConfig.setJavadocScope(Scope.PUBLIC); final Checker c = createChecker(); final String filepath = getPath("InputScopeInnerClasses.java"); assertNotNull(c); final String[] expected = { filepath + ":18: type is missing a Javadoc comment.", }; verify(c, filepath, expected); } public void testScopeAnonInnerPrivate() throws Exception { mConfig.setJavadocScope(Scope.PRIVATE); final Checker c = createChecker(); final String filepath = getPath("InputScopeAnonInner.java"); assertNotNull(c); final String[] expected = { filepath + ":37:34: '(' is followed by whitespace.", filepath + ":39:42: '(' is followed by whitespace.", filepath + ":39:57: ')' is preceeded by whitespace.", filepath + ":43:14: ')' is preceeded by whitespace.", filepath + ":51:34: '(' is followed by whitespace.", filepath + ":53:42: '(' is followed by whitespace.", filepath + ":53:57: ')' is preceeded by whitespace.", filepath + ":57:14: ')' is preceeded by whitespace.", }; verify(c, filepath, expected); } public void testScopeAnonInnerAnonInner() throws Exception { mConfig.setJavadocScope(Scope.ANONINNER); final Checker c = createChecker(); final String filepath = getPath("InputScopeAnonInner.java"); assertNotNull(c); final String[] expected = { filepath + ":26:9: method is missing a Javadoc comment.", filepath + ":37:34: '(' is followed by whitespace.", filepath + ":39:17: method is missing a Javadoc comment.", filepath + ":39:42: '(' is followed by whitespace.", filepath + ":39:57: ')' is preceeded by whitespace.", filepath + ":43:14: ')' is preceeded by whitespace.", filepath + ":51:34: '(' is followed by whitespace.", filepath + ":53:17: method is missing a Javadoc comment.", filepath + ":53:42: '(' is followed by whitespace.", filepath + ":53:57: ')' is preceeded by whitespace.", filepath + ":57:14: ')' is preceeded by whitespace.", }; verify(c, filepath, expected); } public void testHeader() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); final Checker c = createChecker(); final String filepath = getPath("inputHeader.java"); assertNotNull(c); final String[] expected = { filepath + ":1: Missing a header - not enough lines in file.", filepath + ":1: type is missing a Javadoc comment.", filepath + ":1:48: type name 'inputHeader' must match pattern '^[A-Z][a-zA-Z0-9]*$'.", }; verify(c, filepath, expected); } public void testRegexpHeader() throws Exception { final Checker c = createChecker(); mConfig.setBooleanProperty(Defn.HEADER_LINES_REGEXP_PROP, true); mConfig.setHeaderFile(getPath("regexp.header")); mConfig.setHeaderIgnoreLines("4,5"); final String filepath = getPath("InputScopeAnonInner.java"); assertNotNull(c); final String[] expected = { filepath + ":3: Line does not match expected header line of '// Created: 2002'.", filepath + ":37:34: '(' is followed by whitespace.", filepath + ":39:42: '(' is followed by whitespace.", filepath + ":39:57: ')' is preceeded by whitespace.", filepath + ":43:14: ')' is preceeded by whitespace.", filepath + ":51:34: '(' is followed by whitespace.", filepath + ":53:42: '(' is followed by whitespace.", filepath + ":53:57: ')' is preceeded by whitespace.", filepath + ":57:14: ')' is preceeded by whitespace.", }; verify(c, filepath, expected); } public void testImport() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_IMPORT_LENGTH_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputImport.java"); assertNotNull(c); final 
String[] expected = { filepath + ":7: Avoid using the '.*' form of import.", filepath + ":7: Redundant import from the same package.", filepath + ":8: Redundant import from the same package.", filepath + ":9: Avoid using the '.*' form of import.", filepath + ":10: Avoid using the '.*' form of import.", filepath + ":10: Redundant import from the java.lang package.", filepath + ":11: Redundant import from the java.lang package.", filepath + ":13: Unused import - java.util.List", filepath + ":14: Duplicate import to line 13.", filepath + ":14: Unused import - java.util.List", filepath + ":15: Import from illegal package - sun.net.ftpclient.FtpClient", }; verify(c, filepath, expected); } public void testPackageHtml() throws Exception { mConfig.setBooleanProperty(Defn.REQUIRE_PACKAGE_HTML_PROP, true); mConfig.setJavadocScope(Scope.PRIVATE); final Checker c = createChecker(); final String packageHtmlPath = getPath("package.html"); final String filepath = getPath("InputScopeAnonInner.java"); assertNotNull(c); final String[] expected = { packageHtmlPath + ":0: missing package documentation file.", filepath + ":37:34: '(' is followed by whitespace.", filepath + ":39:42: '(' is followed by whitespace.", filepath + ":39:57: ')' is preceeded by whitespace.", filepath + ":43:14: ')' is preceeded by whitespace.", filepath + ":51:34: '(' is followed by whitespace.", filepath + ":53:42: '(' is followed by whitespace.", filepath + ":53:57: ')' is preceeded by whitespace.", filepath + ":57:14: ')' is preceeded by whitespace.", }; verify(c, filepath, expected); } public void testLCurlyMethodIgnore() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setLeftCurlyOptionProperty(Defn.LCURLY_METHOD_PROP, LeftCurlyOption.IGNORE); mConfig.setJavadocScope(Scope.NOTHING); final Checker c = createChecker(); final String filepath = getPath("InputLeftCurlyMethod.java"); assertNotNull(c); final String[] expected = { }; verify(c, filepath, expected); } public void testLCurlyMethodNL() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setLeftCurlyOptionProperty(Defn.LCURLY_METHOD_PROP, LeftCurlyOption.NL); mConfig.setJavadocScope(Scope.NOTHING); mConfig.setBooleanProperty(Defn.ALLOW_TABS_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputLeftCurlyMethod.java"); assertNotNull(c); final String[] expected = { filepath + ":14:39: '{' should be on a new line.", filepath + ":21:20: '{' should be on a new line.", filepath + ":34:31: '{' should be on a new line.", }; verify(c, filepath, expected); } public void testLCurlyOther() throws Exception { mConfig.setJavadocScope(Scope.NOTHING); mConfig.setRCurly(RightCurlyOption.SAME); final Checker c = createChecker(); final String filepath = getPath("InputLeftCurlyOther.java"); assertNotNull(c); final String[] expected = { filepath + ":19:9: '{' should be on the previous line.", filepath + ":21:13: '{' should be on the previous line.", filepath + ":23:17: '{' should be on the previous line.", filepath + ":25:17: '}' should be on the same line.", filepath + ":28:17: '}' should be on the same line.", filepath + ":30:17: '{' should be on the previous line.", filepath + ":34:17: '{' should be on the previous line.", filepath + ":40:13: '}' should be on the same line.", filepath + ":42:13: '{' should be on the previous line.", filepath + ":44:13: '}' should be on the same line.", filepath + ":46:13: '{' should be on the previous line.", filepath + ":52:9: '{' should be on the 
previous line.", filepath + ":54:13: '{' should be on the previous line.", }; verify(c, filepath, expected); } public void testAssertIdentifier() throws Exception { mConfig.setJavadocScope(Scope.NOTHING); final Checker c = createChecker(); final String filepath = getPath("InputAssertIdentifier.java"); assertNotNull(c); final String[] expected = { }; verify(c, filepath, expected); } public void testSemantic() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setJavadocScope(Scope.NOTHING); mConfig.setBlockOptionProperty(Defn.TRY_BLOCK_PROP, BlockOption.STMT); mConfig.setBlockOptionProperty(Defn.CATCH_BLOCK_PROP, BlockOption.STMT); mConfig.setBlockOptionProperty(Defn.FINALLY_BLOCK_PROP, BlockOption.STMT); mConfig.setBooleanProperty(Defn.IGNORE_IMPORTS_PROP, true); mConfig.setBooleanProperty(Defn.IGNORE_LONG_ELL_PROP, false); mConfig.setIllegalInstantiations( "java.lang.Boolean," + "com.puppycrawl.tools.checkstyle.InputModifier," + "java.io.File," + "java.awt.Color"); final Checker c = createChecker(); final String filepath = getPath("InputSemantic.java"); assertNotNull(c); final String[] expected = { filepath + ":19:21: Avoid instantiation of java.lang.Boolean", filepath + ":24:21: Avoid instantiation of java.lang.Boolean", filepath + ":30:16: Avoid instantiation of java.lang.Boolean", filepath + ":37:21: Avoid instantiation of " + "com.puppycrawl.tools.checkstyle.InputModifier", filepath + ":40:18: Avoid instantiation of java.io.File", filepath + ":43:21: Avoid instantiation of java.awt.Color", filepath + ":51:65: Must have at least one statement.", filepath + ":53:41: Must have at least one statement.", filepath + ":70:38: Must have at least one statement.", filepath + ":71:52: Must have at least one statement.", filepath + ":72:45: Must have at least one statement.", filepath + ":74:13: Must have at least one statement.", filepath + ":76:17: Must have at least one statement.", filepath + ":78:13: Must have at least one statement.", filepath + ":81:17: Must have at least one statement.", filepath + ":93:43: Should use uppercase 'L'.", }; verify(c, filepath, expected); } public void testSemantic2() throws Exception { mConfig.setBooleanProperty(Defn.IGNORE_WHITESPACE_PROP, true); mConfig.setJavadocScope(Scope.NOTHING); mConfig.setBlockOptionProperty(Defn.TRY_BLOCK_PROP, BlockOption.TEXT); mConfig.setBlockOptionProperty(Defn.CATCH_BLOCK_PROP, BlockOption.TEXT); mConfig.setBlockOptionProperty(Defn.FINALLY_BLOCK_PROP, BlockOption.TEXT); mConfig.setBooleanProperty(Defn.IGNORE_IMPORTS_PROP, true); mConfig.setBooleanProperty(Defn.IGNORE_LONG_ELL_PROP, true); mConfig.setIllegalInstantiations(""); final Checker c = createChecker(); final String filepath = getPath("InputSemantic.java"); assertNotNull(c); final String[] expected = { filepath + ":51:65: Empty catch block.", filepath + ":71:52: Empty catch block.", filepath + ":72:45: Empty catch block.", filepath + ":74:13: Empty try block.", filepath + ":76:17: Empty finally block.", }; verify(c, filepath, expected); } public void testOpWrapOn() throws Exception { mConfig.setJavadocScope(Scope.NOTHING); mConfig.setBooleanProperty(Defn.IGNORE_OP_WRAP_PROP, false); final Checker c = createChecker(); final String filepath = getPath("InputOpWrap.java"); assertNotNull(c); final String[] expected = { filepath + ":15:19: '+' should be on a new line.", filepath + ":16:15: '-' should be on a new line.", filepath + ":24:18: '&&' should be on a new line.", }; verify(c, filepath, expected); } public void testOpWrapOff() 
throws Exception { mConfig.setJavadocScope(Scope.NOTHING); mConfig.setBooleanProperty(Defn.IGNORE_OP_WRAP_PROP, true); final Checker c = createChecker(); final String filepath = getPath("InputOpWrap.java"); assertNotNull(c); final String[] expected = { }; verify(c, filepath, expected); } }
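The test methods above all follow one pattern: tweak mConfig, build a Checker, then compare the emitted audit messages against a literal array of "path:line[:col]: message" strings. As a rough, self-contained illustration of that comparison step only (the real createChecker/verify helpers live in the test base class, which is not shown here; the class and method names below are placeholders, not Checkstyle API):

import java.util.Arrays;
import java.util.List;

// Hypothetical stand-in for the verify(...) helper used by the tests above:
// it checks that the actual messages match the expected array, in order.
final class ExpectedMessageCheck {
    static void verify(List<String> actualMessages, String[] expected) {
        if (!Arrays.asList(expected).equals(actualMessages)) {
            throw new AssertionError(
                "expected " + Arrays.toString(expected) + " but got " + actualMessages);
        }
    }

    public static void main(String[] args) {
        String filepath = "InputExample.java"; // placeholder path
        String[] expected = { filepath + ":7: Avoid using the '.*' form of import." };
        verify(Arrays.asList(filepath + ":7: Avoid using the '.*' form of import."), expected);
        System.out.println("messages matched");
    }
}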
package info.faceland.bolt; import com.tealcube.minecraft.bukkit.facecore.shade.hilt.HiltItemStack; import org.bukkit.ChatColor; import org.bukkit.Material; import org.bukkit.block.*; import org.bukkit.entity.Player; import org.bukkit.event.Event; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.BlockBreakEvent; import org.bukkit.event.block.BlockPlaceEvent; import org.bukkit.event.entity.EntityExplodeEvent; import org.bukkit.event.entity.ItemSpawnEvent; import org.bukkit.event.inventory.InventoryClickEvent; import org.bukkit.event.inventory.InventoryMoveItemEvent; import org.bukkit.event.inventory.InventoryOpenEvent; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.inventory.DoubleChestInventory; import org.bukkit.inventory.InventoryHolder; import org.bukkit.inventory.ItemStack; import java.util.ArrayList; import java.util.Iterator; import java.util.List; public class BoltListener implements Listener { private final BoltPlugin plugin; public BoltListener(BoltPlugin plugin) { this.plugin = plugin; } @EventHandler(priority = EventPriority.LOWEST) public void onEntityExplode(EntityExplodeEvent event) { Iterator<Block> blockIterator = event.blockList().iterator(); while (blockIterator.hasNext()) { Block b = blockIterator.next(); if (b.getState() instanceof Chest || b.getState() instanceof DoubleChest) { blockIterator.remove(); } } } @EventHandler(priority = EventPriority.LOWEST) public void onInventoryMoveItem(InventoryMoveItemEvent event) { ItemStack is = event.getItem(); if (is == null || is.getType() != Material.PAPER) { return; } HiltItemStack his = new HiltItemStack(event.getItem()); if (his.getName().startsWith(ChatColor.GOLD + "Chest Status:")) { event.setCancelled(true); } } @EventHandler(priority = EventPriority.LOWEST) public void onBlockPlace(BlockPlaceEvent event) { Block b = event.getBlockPlaced(); if (event.getBlockPlaced().getState() instanceof Hopper) { BlockFace[] check = {BlockFace.UP, BlockFace.DOWN, BlockFace.NORTH, BlockFace.EAST, BlockFace.SOUTH, BlockFace.WEST, BlockFace.NORTH_EAST, BlockFace.NORTH_WEST, BlockFace.SOUTH_EAST, BlockFace.SOUTH_WEST}; for (BlockFace bf : check) { if (b.getRelative(bf).getState() instanceof Chest) { if (!BoltAPI.isChestOwner(((Chest) b.getRelative(bf).getState()).getInventory(), event.getPlayer().getName())) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.YELLOW + "You cannot place hoppers next to chests you do not own."); return; } } else if (b.getRelative(bf).getState() instanceof DoubleChest) { if (!BoltAPI.isChestOwner(((DoubleChest) b.getRelative(bf).getState()).getInventory(), event.getPlayer().getName())) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.YELLOW + "You cannot place hoppers next to chests you do not own."); return; } } } } else if (event.getBlockPlaced().getState() instanceof Chest) { BlockFace[] check = {BlockFace.UP, BlockFace.DOWN, BlockFace.NORTH, BlockFace.EAST, BlockFace.SOUTH, BlockFace.WEST, BlockFace.NORTH_EAST, BlockFace.NORTH_WEST, BlockFace.SOUTH_EAST, BlockFace.SOUTH_WEST}; for (BlockFace bf : check) { if (b.getRelative(bf).getState() instanceof Chest) { if (!BoltAPI.isChestOwner(((Chest) b.getRelative(bf).getState()).getInventory(), event.getPlayer().getName())) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.YELLOW + "You cannot place chests next to chests you do not own."); return; } } else if (b.getRelative(bf).getState() instanceof 
DoubleChest) { if (!BoltAPI.isChestOwner(((DoubleChest) b.getRelative(bf).getState()).getInventory(), event.getPlayer().getName())) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.YELLOW + "You cannot place chests next to chests you do not own."); return; } } } Chest chest = (Chest) event.getBlockPlaced().getState(); HiltItemStack hiltItemStack = new HiltItemStack(Material.PAPER); hiltItemStack.setName(ChatColor.GOLD + "Chest Status: " + ChatColor.RED + "Locked"); List<String> lore = new ArrayList<>(); lore.add(ChatColor.WHITE + "<Click to Toggle>"); lore.add(ChatColor.GOLD + "Owner: " + ChatColor.WHITE + event.getPlayer().getName()); List<String> allowedUsers = BoltAPI.getAllowedUsers(chest.getInventory()); if (allowedUsers.size() > 0) { for (String s : allowedUsers) { lore.add(ChatColor.GRAY + s); } } else { lore.add(ChatColor.GRAY + "Type /add <playername> while looking at"); lore.add(ChatColor.GRAY + "this chest to allow people to use it."); } hiltItemStack.setLore(lore); chest.getInventory().setItem(chest.getInventory().getSize() - 1, hiltItemStack); ItemStack old = chest.getInventory().getItem(chest.getInventory().getSize() / 2 - 1); if (old != null && old.getType() == Material.PAPER) { HiltItemStack his = new HiltItemStack(old); if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + ChatColor.RED + "Locked")) { chest.getInventory().setItem(chest.getInventory().getSize() / 2 - 1, null); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + ChatColor.GREEN + "Unlocked")) { chest.getInventory().setItem(chest.getInventory().getSize() / 2 - 1, null); } } } else if (event.getBlockPlaced().getState() instanceof DoubleChest) { BlockFace[] check = {BlockFace.UP, BlockFace.DOWN, BlockFace.NORTH, BlockFace.EAST, BlockFace.SOUTH, BlockFace.WEST}; for (BlockFace bf : check) { if (b.getRelative(bf).getState() instanceof Chest) { if (!BoltAPI.isChestOwner(((Chest) b.getRelative(bf).getState()).getInventory(), event.getPlayer().getName())) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.YELLOW + "You cannot place chests next to chests you do not own."); return; } } else if (b.getRelative(bf).getState() instanceof DoubleChest) { if (!BoltAPI.isChestOwner(((DoubleChest) b.getRelative(bf).getState()).getInventory(), event.getPlayer().getName())) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.YELLOW + "You cannot place chests next to chests you do not own."); return; } } } DoubleChest chest = (DoubleChest) event.getBlockPlaced().getState(); ItemStack old = chest.getInventory().getItem(chest.getInventory().getSize() / 2 - 1); if (old != null && old.getType() == Material.PAPER) { HiltItemStack his = new HiltItemStack(old); if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.LOCKED)) { chest.getInventory().setItem(chest.getInventory().getSize() / 2 - 1, null); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.UNLOCKED)) { chest.getInventory().setItem(chest.getInventory().getSize() / 2 - 1, null); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.ALLOW_VIEW)) { chest.getInventory().setItem(chest.getInventory().getSize() / 2 - 1, null); } } HiltItemStack hiltItemStack = new HiltItemStack(Material.PAPER); hiltItemStack.setName(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.LOCKED); List<String> lore = new ArrayList<>(); lore.add(ChatColor.WHITE + "<Click to Toggle>"); lore.add(ChatColor.GOLD + "Owner: " + ChatColor.WHITE + 
event.getPlayer().getName()); List<String> allowedUsers = BoltAPI.getAllowedUsers(chest.getInventory()); if (allowedUsers.size() > 0) { for (String s : allowedUsers) { lore.add(ChatColor.GRAY + s); } } else { lore.add(ChatColor.GRAY + "Type /add <playername> while looking at"); lore.add(ChatColor.GRAY + "this chest to allow people to use it."); } hiltItemStack.setLore(lore); chest.getInventory().setItem(chest.getInventory().getSize() - 1, hiltItemStack); } } @EventHandler(priority = EventPriority.LOW) public void onBlockBreakDoor(BlockBreakEvent event) { if (event.isCancelled()) { return; } if (event.getBlock().getType() != Material.IRON_DOOR_BLOCK && event.getBlock().getType() != Material.WOODEN_DOOR) { return; } Block below = event.getBlock().getRelative(0, -2, 0); if (below.getType() != Material.CHEST) { below = event.getBlock().getRelative(0, -3, 0); if (below.getType() != Material.CHEST) { return; } } InventoryHolder c = (InventoryHolder) below.getState(); if (!BoltAPI.isChestOwner(c.getInventory(), event.getPlayer().getName())) { event.setCancelled(true); } } @EventHandler(priority = EventPriority.LOWEST) public void onBlockBreakChest(BlockBreakEvent event) { if (!(event.getBlock().getState() instanceof Chest) && !(event.getBlock().getState() instanceof DoubleChest)) { return; } if (!BoltAPI.isChestOwner(((InventoryHolder) event.getBlock().getState()).getInventory(), event.getPlayer().getName())) { if (!event.getPlayer().hasPermission("bolt.anylock")) { event.setCancelled(true); event.getPlayer().sendMessage(ChatColor.RED + "You cannot break this chest."); return; } } InventoryHolder holder = (InventoryHolder) event.getBlock().getState(); ItemStack itemStack = holder.getInventory().getItem(holder.getInventory().getSize() - 1); if (itemStack == null) { return; } HiltItemStack his = new HiltItemStack(itemStack); if (!his.getName().startsWith(ChatColor.GOLD + "Chest Status:")) { return; } if (holder.getInventory() instanceof DoubleChestInventory) { if (holder.getInventory().getItem(holder.getInventory().getSize() / 2 - 1) != null) { event.getBlock().getWorld().dropItemNaturally(event.getBlock().getLocation(), holder.getInventory().getItem( holder.getInventory().getSize() / 2 - 1)); } HiltItemStack hiltItemStack = new HiltItemStack(Material.PAPER); hiltItemStack.setName(ChatColor.GOLD + "Chest Status: " + ChatColor.RED + "Locked"); List<String> lore = new ArrayList<>(); lore.add(ChatColor.WHITE + "<Click to Toggle>"); lore.add(ChatColor.GOLD + "Owner: " + ChatColor.WHITE + event.getPlayer().getName()); List<String> allowedUsers = BoltAPI.getAllowedUsers(holder.getInventory()); if (allowedUsers.size() > 0) { for (String s : allowedUsers) { lore.add(ChatColor.GRAY + s); } } else { lore.add(ChatColor.GRAY + "Type /add <playername> while looking at"); lore.add(ChatColor.GRAY + "this chest to allow people to use it."); } hiltItemStack.setLore(lore); holder.getInventory().setItem(holder.getInventory().getSize() / 2 - 1, hiltItemStack); } } @EventHandler(priority = EventPriority.LOWEST) public void onInventoryOpen(InventoryOpenEvent event) { if (!(event.getPlayer() instanceof Player)) { return; } if (event.getInventory().getHolder() instanceof Chest) { if (!BoltAPI.canOpen(event.getInventory(), (Player) event.getPlayer())) { event.setCancelled(true); ((Player) event.getPlayer()).sendMessage(ChatColor.YELLOW + "This chest is locked."); } } else if (event.getInventory().getHolder() instanceof DoubleChest) { if (!BoltAPI.canOpen(event.getInventory(), (Player) event.getPlayer())) { 
event.setCancelled(true); ((Player) event.getPlayer()).sendMessage(ChatColor.YELLOW + "This chest is locked."); } } } @EventHandler(priority = EventPriority.LOWEST) public void onInventoryClick(InventoryClickEvent event) { if (!(event.getWhoClicked() instanceof Player)) { return; } if (event.getInventory().getHolder() instanceof Chest || event.getInventory().getHolder() instanceof DoubleChest) { if (!BoltAPI.canUse(event.getInventory(), (Player) event.getWhoClicked())) { event.setCancelled(true); event.setResult(Event.Result.DENY); return; } ItemStack itemStack = event.getCurrentItem(); if (itemStack == null || itemStack.getType() != Material.PAPER) { return; } HiltItemStack his = new HiltItemStack(itemStack); if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.LOCKED.getDisplay())) { event.setCancelled(true); event.setResult(Event.Result.DENY); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.UNLOCKED.getDisplay())) { event.setCancelled(true); event.setResult(Event.Result.DENY); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.ALLOW_VIEW.getDisplay())) { event.setCancelled(true); event.setResult(Event.Result.DENY); } if (BoltAPI.isChestOwner(event.getInventory(), event.getWhoClicked().getName())) { if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.LOCKED.getDisplay())) { his.setName(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.UNLOCKED.getDisplay()); event.setCurrentItem(his); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.UNLOCKED.getDisplay())) { his.setName(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.ALLOW_VIEW.getDisplay()); event.setCurrentItem(his); } else if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.ALLOW_VIEW.getDisplay())) { his.setName(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.LOCKED.getDisplay()); event.setCurrentItem(his); } } } } @EventHandler(priority = EventPriority.LOWEST) public void onItemSpawn(ItemSpawnEvent event) { ItemStack itemStack = event.getEntity().getItemStack(); HiltItemStack his = new HiltItemStack(itemStack); if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.LOCKED)) { event.setCancelled(true); } if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.UNLOCKED)) { event.setCancelled(true); } if (his.getName().equals(ChatColor.GOLD + "Chest Status: " + BoltAPI.LockState.ALLOW_VIEW)) { event.setCancelled(true); } } @EventHandler(priority = EventPriority.LOWEST) public void onPlayerInteractEvent(PlayerInteractEvent event) { if (event.getClickedBlock() == null || event.getClickedBlock().getType() != Material.IRON_DOOR_BLOCK && event.getClickedBlock().getType() != Material.WOODEN_DOOR) { return; } Block below = event.getClickedBlock().getRelative(0, -2, 0); if (below == null || below.getType() != Material.CHEST) { below = event.getClickedBlock().getRelative(0, -3, 0); if (below == null || below.getType() != Material.CHEST) { return; } } InventoryHolder c = (InventoryHolder) below.getState(); if (!BoltAPI.canUse(c.getInventory(), event.getPlayer())) { event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); } } }
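The onBlockPlace handler above repeatedly scans the block faces adjacent to the placed block and cancels the event as soon as a neighbouring chest belongs to someone else. A minimal, Bukkit-free sketch of that neighbour-scan idea follows; the coordinate offsets stand in for BlockFace values and the owners/chests maps stand in for the plugin's BoltAPI lookups, so none of these names are the real API.

import java.util.Map;
import java.util.Set;

// Bukkit-free sketch: offsets play the role of BlockFace neighbours,
// owners maps "x,y,z" keys to an owner name, chests is the set of chest blocks.
final class NeighbourOwnershipCheck {
    private static final int[][] OFFSETS = {
        {0, 1, 0}, {0, -1, 0}, {1, 0, 0}, {-1, 0, 0}, {0, 0, 1}, {0, 0, -1}
    };

    static boolean mayPlaceNextTo(int x, int y, int z, String player,
                                  Map<String, String> owners, Set<String> chests) {
        for (int[] o : OFFSETS) {
            String key = (x + o[0]) + "," + (y + o[1]) + "," + (z + o[2]);
            // Placement is refused if any neighbour is a chest owned by someone else.
            if (chests.contains(key) && !player.equals(owners.get(key))) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Map<String, String> owners = Map.of("1,0,0", "Alice");
        Set<String> chests = Set.of("1,0,0");
        System.out.println(mayPlaceNextTo(0, 0, 0, "Bob", owners, chests));   // false
        System.out.println(mayPlaceNextTo(0, 0, 0, "Alice", owners, chests)); // true
    }
}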
package de.charite.compbio.exomiser.io; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.nio.file.Path; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** * Connects to Phenodigm and dumps out data in pipe delimited format to the * specified path. * * @author Jules Jacobsen <jules.jacobsen@sanger.ac.uk> */ @Component public class PhenodigmDataDumper { private static final Logger logger = LoggerFactory.getLogger(PhenodigmDataDumper.class); @Autowired private DataSource phenodigmDataSource; public PhenodigmDataDumper() { } /** * * @param outputPath */ public void dumpPhenodigmData(Path outputPath) { logger.info("Starting to dump files"); if (outputPath.toFile().mkdir()) { logger.info("Created new directory {} for Phenodigm datadumps.", outputPath); } dumpMp(outputPath, "mp.pg"); dumpMouseGeneOrthologs(outputPath, "human2mouseOrthologs.pg"); dumpDiseaseHp(outputPath, "diseaseHp.pg"); dumpMouseMp(outputPath, "mouseMp.pg"); dumpOmimTerms(outputPath, "omimTerms.pg"); dumpHpMpMapping(outputPath, "hpMpMapping.pg"); dumpHpHpMapping(outputPath, "hpHpMapping.pg"); dumpMouseGeneLevelSummary(outputPath, "mouseGeneLevelSummary.pg"); dumpFishGeneLevelSummary(outputPath, "fishGeneLevelSummary.pg"); dumpFishGeneOrthologs(outputPath, "human2fishOrthologs.pg"); dumpOrphanet(outputPath, "orphanet.pg"); dumpZp(outputPath, "zp.pg"); dumpFishZp(outputPath, "zfin_zp.pg"); dumpHpZpMapping(outputPath, "hpZpMapping.pg"); } protected File dumpOrphanet(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Orphanet data to file: {}", outfile); String sql = "select distinct d.disease_id, entrezgene, disease_term " + "from mouse_disease_gene_summary mdm, disease d, mouse_gene_ortholog mgo " + "where d.disease_id=mdm.disease_id and mdm.model_gene_id = mgo.model_gene_id and " + "human_curated = 1 and d.disease_id like '%ORPHA%' and entrezgene is not null"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String diseaseId = rs.getString("disease_id"); String entrezId = rs.getString("entrezgene"); String diseaseTerm = rs.getString("disease_term"); String outLine = String.format("%s||%s|%s||", diseaseId, diseaseTerm,entrezId); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpMp(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm MP data to file: {}", outfile); String sql = "select mp_id, term from mp"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { 
ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String mpId = rs.getString("mp_id"); String mpTerm = rs.getString("term"); String outLine = String.format("%s|%s", mpId, mpTerm); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpZp(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm ZP data to file: {}", outfile); String sql = "select zp_id, term from zp"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String zpId = rs.getString("zp_id"); String zpTerm = rs.getString("term"); String outLine = String.format("%s|%s", zpId, zpTerm); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpMouseGeneOrthologs(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm MouseGeneOrtholog data to file: {}", outfile); String sql = "select model_gene_id, model_gene_symbol, hgnc_gene_symbol, entrezgene from mouse_gene_ortholog where entrezgene is not NULL"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String modelGeneId = rs.getString("model_gene_id"); String modelGeneSymbol = rs.getString("model_gene_symbol"); String hgncGeneId = rs.getString("hgnc_gene_symbol"); String entrez = rs.getString("entrezgene"); String outLine = String.format("%s|%s|%s|%s", modelGeneId, modelGeneSymbol, hgncGeneId, entrez); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpFishGeneOrthologs(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm FishGeneOrtholog data to file: {}", outfile); String sql = "select model_gene_id, model_gene_symbol, hgnc_id, entrezgene from fish_gene_ortholog"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String modelGeneId = rs.getString("model_gene_id"); String modelGeneSymbol = rs.getString("model_gene_symbol"); String hgncGeneId = rs.getString("hgnc_id"); String entrez = rs.getString("entrezgene"); String outLine = String.format("%s|%s|%s|%s", modelGeneId, modelGeneSymbol, hgncGeneId, entrez); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } 
protected File dumpDiseaseHp(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm diseaseHp data to file: {}", outfile); String sql = "select distinct disease_id , group_concat(distinct d.hp_id) as hpids from disease_hp d, hp_hp_mapping h where d.hp_id=h.hp_id group by disease_id"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String diseaseId = rs.getString("disease_id"); String hpIds = rs.getString("hpids"); String outLine = String.format("%s|%s", diseaseId, hpIds); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpMouseMp(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm mouseMp data to file: {}", outfile); String sql = "select mgo.model_gene_id, mgo.model_gene_symbol, mmm.model_id, group_concat(mp_id) as mpids " + "from mouse_model_mp mmm, mouse_model_gene_ortholog mmgo, mouse_gene_ortholog mgo " + "where mgo.model_gene_id = mmgo.model_gene_id and mmgo.model_id = mmm.model_id group by mmm.model_id"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String modelGeneId = rs.getString("model_gene_id"); String modelGeneSymbol = rs.getString("model_gene_symbol"); String mpIds = rs.getString("mpids"); String modelId = rs.getString("model_id"); String outLine = String.format("%s|%s|%s|%s", modelGeneId, modelGeneSymbol, modelId, mpIds); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpFishZp(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm fishZp data to file: {}", outfile); String sql = "select mgo.model_gene_id, mgo.model_gene_symbol, mmm.model_id, group_concat(distinct zp_id) as zpids " + "from fish_model_zp mmm, fish_model_gene_ortholog mmgo, fish_gene_ortholog mgo " + "where mgo.model_gene_id = mmgo.model_gene_id and mmgo.model_id = mmm.model_id group by mmm.model_id"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String modelGeneId = rs.getString("model_gene_id"); String modelGeneSymbol = rs.getString("model_gene_symbol"); String zpIds = rs.getString("zpids"); String modelId = rs.getString("model_id"); String outLine = String.format("%s|%s|%s|%s", modelGeneId, modelGeneSymbol, modelId, zpIds); writer.write(outLine); writer.newLine(); } } catch 
(IOException | SQLException ex) { logger.error(null, ex); } return outfile; } // protected File dumpDiseaseDiseaseSummary(Path outputPath, String outName) { // File outfile = new File(outputPath.toFile(), outName); // logger.info("Dumping Phenodigm diseaseDiseaseSummary data to file: {}", outfile); // String sql = "select disease_id , disease_match , disease_to_disease_perc_score from disease_disease_association"; // //no need to close things when using the try-with-resources // try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); // Connection connection = phenodigmConnection.getConnection(); // PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { // ps.setFetchSize(Integer.MIN_VALUE); // ResultSet rs = ps.executeQuery(); // while (rs.next()) { // String diseaseId = rs.getString("disease_id"); // String diseaseMatch = rs.getString("disease_match"); // String score = rs.getString("disease_to_disease_perc_score"); // String outLine = String.format("%s|%s|%s", diseaseId, diseaseMatch, score); // writer.write(outLine); // writer.newLine(); // } catch (IOException | SQLException ex) { // logger.error(null, ex); // return outfile; protected File dumpOmimTerms(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm omimTerms data to file: {}", outfile); String sql = "select disease_id, disease_term from disease"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String diseaseId = rs.getString("disease_id"); String diseaseTerm = rs.getString("disease_term"); String outLine = String.format("%s|%s", diseaseId, diseaseTerm); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpHpMpMapping(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm hpMpMapping data to file: {}", outfile); //hp_mp_mapping has a mapping_id column int id = 0; String sql = "select hp_id, mp_id, sqrt(ic*simJ) as score from hp_mp_mapping where ic > 2.75"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { id++; String hpId = rs.getString("hp_id"); String mpId = rs.getString("mp_id"); String score = rs.getString("score"); String outLine = String.format("%d|%s|%s|%s", id, hpId, mpId, score); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpHpZpMapping(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm hpZpMapping data to file: {}", outfile); //hp_mp_mapping has a mapping_id column int id = 0; String sql = "select hp_id, zp_id, sqrt(ic*simJ) as score from hp_zp_mapping 
where ic > 2.75"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { id++; String hpId = rs.getString("hp_id"); String zpId = rs.getString("zp_id"); String score = rs.getString("score"); String outLine = String.format("%d|%s|%s|%s", id, hpId, zpId, score); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpHpHpMapping(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm hpHpMapping data to file: {}", outfile); //hp_hp_mapping has a mapping_id column int id = 0; String sql = "select hp_id, hp_id_hit, sqrt(ic*simJ) as score from hp_hp_mapping where ic > 2.75"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { id++; String hpId = rs.getString("hp_id"); String hpIdHit = rs.getString("hp_id_hit"); String score = rs.getString("score"); String outLine = String.format("%d|%s|%s|%s", id, hpId, hpIdHit, score); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File dumpMouseGeneLevelSummary(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm MouseGeneLevelSummary data to file: {}", outfile); String sql = "select disease_id , mdm.model_gene_id , model_gene_symbol, " + "IF((max_mod_disease_to_model_perc_score is not null && max_htpc_disease_to_model_perc_score is null) || max_mod_disease_to_model_perc_score > max_htpc_disease_to_model_perc_score, max_mod_disease_to_model_perc_score, max_htpc_disease_to_model_perc_score ) as score " + "from mouse_disease_gene_summary mdm, mouse_gene_ortholog mgo " + "where mdm.model_gene_id = mgo.model_gene_id and (max_mod_disease_to_model_perc_score is not null or max_htpc_disease_to_model_perc_score is not null)"; // + "and (max_mod_disease_to_model_perc_score is not null or max_htpc_disease_to_model_perc_score is not null)"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String diseaseId = rs.getString("disease_id"); String modelGeneId = rs.getString("model_gene_id"); String modelGeneSymbol = rs.getString("model_gene_symbol"); String score = rs.getString("score"); String outLine = String.format("%s|%s|%s|%s", diseaseId, modelGeneId, modelGeneSymbol, score); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } protected File 
dumpFishGeneLevelSummary(Path outputPath, String outName) { File outfile = new File(outputPath.toFile(), outName); logger.info("Dumping Phenodigm FishGeneLevelSummary data to file: {}", outfile); String sql = "select distinct disease_id , mdm.model_gene_id , model_gene_symbol, " + "IF((max_mod_disease_to_model_perc_score is not null && max_htpc_disease_to_model_perc_score is null) || max_mod_disease_to_model_perc_score > max_htpc_disease_to_model_perc_score, max_mod_disease_to_model_perc_score, max_htpc_disease_to_model_perc_score ) as score " + "from fish_disease_gene_summary mdm, fish_gene_ortholog mgo " + "where mdm.model_gene_id = mgo.model_gene_id and (max_mod_disease_to_model_perc_score is not null or max_htpc_disease_to_model_perc_score is not null)"; // + "and (max_mod_disease_to_model_perc_score is not null or max_htpc_disease_to_model_perc_score is not null)"; //no need to close things when using the try-with-resources try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile)); Connection connection = phenodigmDataSource.getConnection(); PreparedStatement ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) { ps.setFetchSize(Integer.MIN_VALUE); ResultSet rs = ps.executeQuery(); while (rs.next()) { String diseaseId = rs.getString("disease_id"); String modelGeneId = rs.getString("model_gene_id"); String modelGeneSymbol = rs.getString("model_gene_symbol"); String score = rs.getString("score"); String outLine = String.format("%s|%s|%s|%s", diseaseId, modelGeneId, modelGeneSymbol, score); writer.write(outLine); writer.newLine(); } } catch (IOException | SQLException ex) { logger.error(null, ex); } return outfile; } }
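Every dumpXxx method above repeats the same shape: open a writer, a connection and a forward-only, read-only PreparedStatement in one try-with-resources, stream the result set, and write one pipe-delimited line per row. A condensed sketch of that pattern, with a placeholder query and column names (the real SQL and the injected phenodigmDataSource are specific to the class above):

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;

// Condensed sketch of the streaming pipe-delimited dump pattern used above.
final class PipeDelimitedDumper {
    static File dump(DataSource dataSource, Path outputPath, String outName) {
        File outfile = new File(outputPath.toFile(), outName);
        String sql = "select id, term from some_table"; // placeholder query
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(outfile));
             Connection connection = dataSource.getConnection();
             PreparedStatement ps = connection.prepareStatement(
                     sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
            ps.setFetchSize(Integer.MIN_VALUE); // MySQL streaming hint, as in the code above
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    writer.write(rs.getString("id") + "|" + rs.getString("term"));
                    writer.newLine();
                }
            }
        } catch (IOException | SQLException ex) {
            ex.printStackTrace(); // the class above logs via slf4j instead
        }
        return outfile;
    }
}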
package innovimax.mixthem.io; import java.io.IOException; /** * This interface provides for reading characters from a character input. * @author Innovimax * @version 1.0 */ public interface IInputChar { /** * Returns true if there are more characters. * @return true if there are more characters * @throws IOException - If an I/O error occurs */ boolean hasCharacter() throws IOException; /** * Reads an eligible character for the given type, or returns -1 if the character is not eligible or there are no more characters. * @param type The type of reading expected * @return The eligible character as an int, or -1 if not eligible or no more characters * @throws IOException - If an I/O error occurs */ int nextCharacter(ReadType type) throws IOException; /** * Reads characters into a portion of an array. * @param buffer Destination buffer * @param len - Maximum number of characters to read * @return The number of characters read, or -1 if there are no more characters * @throws IOException - If an I/O error occurs */ int nextCharacters(char[] buffer, int len) throws IOException; /** * Closes this input and releases any system resources associated with it. * @throws IOException - If an I/O error occurs */ void close() throws IOException; }
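A minimal implementation sketch of this interface backed by a java.io.Reader. ReadType is defined elsewhere in the package, so a one-constant placeholder enum is declared here only to make the sketch compile, the type argument is ignored, and the implements clause is commented out; this is an illustration of the contract, not the project's real reader.

import java.io.IOException;
import java.io.PushbackReader;
import java.io.Reader;

// Placeholder for the real innovimax.mixthem.io.ReadType enum (assumption).
enum ReadType { CHAR }

// Sketch: wraps any Reader and peeks one character ahead so that
// hasCharacter() can answer without consuming input.
class ReaderInputChar /* implements IInputChar */ {
    private final PushbackReader reader;

    ReaderInputChar(Reader in) { this.reader = new PushbackReader(in); }

    public boolean hasCharacter() throws IOException {
        int c = reader.read();
        if (c == -1) return false;
        reader.unread(c);
        return true;
    }

    public int nextCharacter(ReadType type) throws IOException {
        return reader.read(); // -1 when there are no more characters
    }

    public int nextCharacters(char[] buffer, int len) throws IOException {
        return reader.read(buffer, 0, len);
    }

    public void close() throws IOException { reader.close(); }
}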
package me.nallar.modpatcher; import javassist.ClassLoaderPool; import me.nallar.javapatcher.patcher.Patcher; import me.nallar.javapatcher.patcher.Patches; import me.nallar.modpatcher.mappings.MCPMappings; import net.minecraft.launchwrapper.IClassTransformer; import java.io.*; import java.nio.file.*; public class ModPatcher implements IClassTransformer { /** * Gets the JavaPatcher Patcher instance * * @return the Patcher */ public static Patcher getPatcher() { return postSrgPatcher; } /** * Gets the name of the setup class to use in your IFMLLoadingPlugin * * @return Name of the ModPatcher setup class */ public static String getSetupClass() { return "me.nallar.modpatcher.ModPatcherSetupClass"; } private static final Patcher preSrgPatcher; private static final Patcher postSrgPatcher; private static final String ALREADY_LOADED_PROPERTY_NAME = "nallar.ModPatcher.alreadyLoaded"; public static final String MOD_PATCHES_DIRECTORY = "./ModPatches/"; public static final String MOD_PATCHES_SRG_DIRECTORY = "./ModPatchesSrg/"; static { PatcherLog.info("ModPatcher running under classloader " + ModPatcher.class.getClassLoader().getClass().getName()); boolean alreadyLoaded = System.getProperty(ALREADY_LOADED_PROPERTY_NAME) != null; if (alreadyLoaded) { PatcherLog.error("Detected multiple classloads of ModPatcher - classloading issue?", new Throwable()); } else { System.setProperty(ALREADY_LOADED_PROPERTY_NAME, "true"); } Patcher preSrgPatcher_; Patcher postSrgPatcher_; try { preSrgPatcher_ = new Patcher(new ClassLoaderPool(false), Patches.class, new MCPMappings(false)); postSrgPatcher_ = new Patcher(new ClassLoaderPool(true), Patches.class, new MCPMappings(true)); } catch (Exception t) { PatcherLog.error("Failed to create Patcher", t); throw new RuntimeException(t); } preSrgPatcher = preSrgPatcher_; postSrgPatcher = postSrgPatcher_; // TODO - issue #2. Determine layout/config file structure recursivelyAddXmlFiles(new File(MOD_PATCHES_SRG_DIRECTORY), preSrgPatcher); recursivelyAddXmlFiles(new File(MOD_PATCHES_DIRECTORY), postSrgPatcher); } private static void recursivelyAddXmlFiles(File directory, Patcher patcher) { if (!directory.isDirectory()) { return; } try { for (File f : directory.listFiles()) { if (f.isDirectory()) { recursivelyAddXmlFiles(f, patcher); } else if (f.getName().endsWith(".xml")) { patcher.readPatchesFromXmlInputStream(new FileInputStream(f)); } else if (f.getName().endsWith(".json")) { patcher.readPatchesFromJsonInputStream(new FileInputStream(f)); } } } catch (IOException e) { PatcherLog.warn("Failed to load patch", e); } } // TODO - determine whether to remove non-SRG patching? Not usable just now. 
public static byte[] preSrgTransformationHook(String name, String transformedName, byte[] originalBytes) { try { return preSrgPatcher.patch(name, originalBytes); } catch (Throwable t) { PatcherLog.error("Failed to patch " + transformedName, t); } return originalBytes; } public static boolean requiresSrgHook(String transformedName) { return postSrgPatcher.willPatch(transformedName); } public static byte[] postSrgTransformationHook(String name, String transformedName, byte[] originalBytes) { try { return postSrgPatcher.patch(transformedName, originalBytes); } catch (Throwable t) { PatcherLog.error("Failed to patch " + transformedName, t); } return originalBytes; } private boolean init; @Override public byte[] transform(String name, String transformedName, byte[] bytes) { if (!init) { init = true; getPatcher().logDebugInfo(); } return postSrgTransformationHook(name, transformedName, bytes); } static void modPatcherAsCoreModStartup() { File modPatchesDirectory = new File(MOD_PATCHES_DIRECTORY); if (!modPatchesDirectory.exists()) { modPatchesDirectory.mkdir(); try { Files.copy(ModPatcher.class.getResourceAsStream("/modpatcher.json.example"), new File(modPatchesDirectory, "/modpatcher.json.example").toPath(), StandardCopyOption.REPLACE_EXISTING); Files.copy(ModPatcher.class.getResourceAsStream("/modpatcher.xml.example"), new File(modPatchesDirectory, "/modpatcher.xml.example").toPath(), StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { PatcherLog.warn("Failed to extract example patcher files", e); } } } }
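The static initializer above walks the ModPatches / ModPatchesSrg directories recursively and feeds every .xml or .json file to the Patcher. A small sketch of that discovery step using java.nio.file.Files.walk (the directory constant matches the class above, but the actual loading call depends on the JavaPatcher API and is only indicated in a comment):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Sketch of the recursive .xml/.json patch-file discovery performed by
// recursivelyAddXmlFiles above, expressed with Files.walk.
final class PatchFileScan {
    public static void main(String[] args) throws IOException {
        Path root = Paths.get("./ModPatches/");
        if (!Files.isDirectory(root)) return;
        try (Stream<Path> walk = Files.walk(root)) {
            List<Path> patchFiles = walk
                .filter(Files::isRegularFile)
                .filter(p -> p.toString().endsWith(".xml") || p.toString().endsWith(".json"))
                .collect(Collectors.toList());
            patchFiles.forEach(System.out::println);
            // for each file: patcher.readPatchesFromXmlInputStream(...) or
            // patcher.readPatchesFromJsonInputStream(...), as in the class above
        }
    }
}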
package mho.haskellesque.math; import mho.haskellesque.iterables.IterableUtils; import mho.haskellesque.ordering.Ordering; import mho.haskellesque.structures.Pair; import org.jetbrains.annotations.NotNull; import java.math.BigInteger; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.function.Function; import java.util.zip.ZipEntry; import static mho.haskellesque.iterables.IterableUtils.*; import static mho.haskellesque.ordering.Ordering.*; /** * Some mathematical utilities */ public final class MathUtils { /** * Disallow instantiation */ private MathUtils() {} /** * The greatest common divisor of two <tt>int</tt>s. If both <tt>x</tt> and <tt>y</tt> are zero, the result is * undefined. Otherwise, the result is positive. * * <ul> * <li><tt>x</tt> may be any <tt>int</tt>.</li> * <li><tt>y</tt> may be any <tt>int</tt>.</li> * <li><tt>x</tt> and <tt>y</tt> y may not both be zero.</li> * <li>The result is non-negative.</li> * </ul> * * @param x the first number * @param y the second number * @return gcd(x, y) */ public static int gcd(int x, int y) { if (x == 0 && y == 0) throw new ArithmeticException("cannot take gcd of 0 and 0"); return positiveGcd(Math.abs(x), Math.abs(y)); } /** * The greatest common divisor of two non-negative <tt>int</tt>s. * * <ul> * <li><tt>x</tt> must be non-negative.</li> * <li><tt>y</tt> must be non-negative.</li> * <li><tt>x</tt> and <tt>y</tt> y may not both be zero.</li> * <li>The result is non-negative.</li> * </ul> * * @param x the first number * @param y the second number * @return gcd(x, y) */ private static int positiveGcd(int x, int y) { return y == 0 ? x : positiveGcd(y, x % y); } /** * Returns the bits of a non-negative <tt>int</tt>. The <tt>Iterable</tt> returned is little-endian; the least- * significant bits come first. Zero gives an empty <tt>Iterable</tt>. There are no trailing unset bits. Does not * support removal. * * <ul> * <li><tt>n</tt> must be non-negative.</li> * <li>The result is a finite <tt>Iterable</tt> ending with <tt>true</tt>.</li> * </ul> * * Result length is 0 if <tt>n</tt> is 0, or &#x230A;log<sub>2</sub><tt>n</tt>&#x230B; otherwise * * @param n a number * @return <tt>n</tt>'s bits in little-endian order */ public static @NotNull Iterable<Boolean> bits(final int n) { if (n < 0) throw new ArithmeticException("cannot get bits of a negative number"); return () -> new Iterator<Boolean>() { private int remaining = n; @Override public boolean hasNext() { return remaining != 0; } @Override public Boolean next() { boolean bit = (remaining & 1) == 1; remaining >>= 1; return bit; } @Override public void remove() { throw new UnsupportedOperationException("cannot remove from this iterator"); } }; } /** * Returns the bits of a non-negative <tt>BigInteger</tt>. The <tt>Iterable</tt> returned is little-endian; the * least-significant bits come first. Zero gives an empty <tt>Iterable</tt>. There are no trailing unset bits. * Does not support removal. 
* * <ul> * <li><tt>n</tt> must be non-negative.</li> * <li>The result is a finite <tt>Iterable</tt> ending with <tt>true</tt>.</li> * </ul> * * Result length is 0 if <tt>n</tt> is 0, or &#x230A;log<sub>2</sub><tt>n</tt>&#x230B; otherwise * * @param n a number * @return <tt>n</tt>'s bits in little-endian order */ public static @NotNull Iterable<Boolean> bits(final @NotNull BigInteger n) { if (n.signum() == -1) throw new ArithmeticException("cannot get bits of a negative number"); return () -> new Iterator<Boolean>() { private BigInteger remaining = n; @Override public boolean hasNext() { return !remaining.equals(BigInteger.ZERO); } @Override public Boolean next() { boolean bit = remaining.testBit(0); remaining = remaining.shiftRight(1); return bit; } @Override public void remove() { throw new UnsupportedOperationException("cannot remove from this iterator"); } }; } /** * Returns the lowest <tt>n</tt> bits of a non-negative <tt>int</tt>. The <tt>Iterable</tt> returned is * little-endian; the least-significant bits come first. It is exactly <tt>n</tt> bits long, and padded with zeroes * (falses) if necessary. Does not support removal. * * <ul> * <li><tt>length</tt> must be non-negative.</li> * <li><tt>n</tt> must be non-negative.</li> * <li>The result is a finite <tt>Iterable</tt>.</li> * </ul> * * Result length is <tt>n</tt> * * @param n a number * @return <tt>n</tt>'s bits in little-endian order */ public static @NotNull Iterable<Boolean> bitsPadded(int length, int n) { if (length < 0) throw new ArithmeticException("cannot pad with a negative length"); return pad(false, length, bits(n)); } /** * Returns the lowest <tt>n</tt> bits of a non-negative <tt>BigInteger</tt>. The <tt>Iterable</tt> returned is * little-endian; the least-significant bits come first. It is exactly <tt>n</tt> bits long, and padded with zeroes * (falses) if necessary. Does not support removal. 
* * <ul> * <li><tt>length</tt> must be non-negative.</li> * <li><tt>n</tt> must be non-negative.</li> * <li>The result is a finite <tt>Iterable</tt>.</li> * </ul> * * Result length is <tt>n</tt> * * @param n a number * @return <tt>n</tt>'s bits in little-endian order */ public static @NotNull Iterable<Boolean> bitsPadded(@NotNull BigInteger length, @NotNull BigInteger n) { if (length.signum() == -1) throw new ArithmeticException("cannot pad with a negative length"); return pad(false, length, bits(n)); } public static @NotNull Iterable<Boolean> bigEndianBits(final int n) { return reverse(bits(n)); } public static @NotNull Iterable<Boolean> bigEndianBits(@NotNull final BigInteger n) { return reverse(bits(n)); } public static @NotNull Iterable<Boolean> bigEndianBitsPadded(int length, int n) { return reverse(bitsPadded(length, n)); } public static @NotNull Iterable<Boolean> bigEndianBitsPadded(BigInteger length, BigInteger n) { return reverse(bitsPadded(length, n)); } public static @NotNull BigInteger fromBigEndianBits(@NotNull Iterable<Boolean> bits) { BigInteger n = BigInteger.ZERO; for (boolean bit : bits) { n = n.shiftLeft(1); if (bit) n = n.add(BigInteger.ONE); } return n; } public static @NotNull BigInteger fromBits(@NotNull Iterable<Boolean> bits) { return fromBigEndianBits(reverse(bits)); } public static @NotNull Iterable<Integer> digits(int base, final int n) { return () -> new Iterator<Integer>() { private int remaining = n; @Override public boolean hasNext() { return remaining != 0; } @Override public Integer next() { int digit = remaining % base; remaining /= base; return digit; } }; } public static @NotNull Iterable<BigInteger> digits(@NotNull BigInteger base, @NotNull final BigInteger n) { return () -> new Iterator<BigInteger>() { private BigInteger remaining = n; @Override public boolean hasNext() { return !remaining.equals(BigInteger.ZERO); } @Override public BigInteger next() { BigInteger digit = remaining.mod(base); remaining = remaining.divide(base); return digit; } }; } public static @NotNull Iterable<Integer> digitsPadded(int length, int base, int n) { return pad(0, length, digits(base, n)); } public static @NotNull Iterable<BigInteger> digitsPadded( @NotNull BigInteger length, @NotNull BigInteger base, @NotNull BigInteger n ) { return pad(BigInteger.ZERO, length, digits(base, n)); } public static @NotNull List<Integer> bigEndianDigits(int base, final int n) { return reverse(digits(base, n)); } public static @NotNull List<BigInteger> bigEndianDigits(@NotNull BigInteger base, final @NotNull BigInteger n) { return reverse(digits(base, n)); } public static @NotNull Iterable<Integer> bigEndianDigitsPadded(int length, int base, int n) { return reverse(digitsPadded(length, base, n)); } public static @NotNull Iterable<BigInteger> bigEndianDigitsPadded( @NotNull BigInteger length, @NotNull BigInteger base, @NotNull BigInteger n ) { return reverse(digitsPadded(length, base, n)); } public static @NotNull BigInteger fromBigEndianDigits(int base, @NotNull Iterable<Integer> digits) { BigInteger n = BigInteger.ZERO; for (int digit : digits) { n = n.multiply(BigInteger.valueOf(base)).add(BigInteger.valueOf(digit)); } return n; } public static @NotNull BigInteger fromBigEndianDigits( @NotNull BigInteger base, @NotNull Iterable<BigInteger> digits ) { BigInteger n = BigInteger.ZERO; for (BigInteger digit : digits) { n = n.multiply(base).add(digit); } return n; } public static @NotNull BigInteger fromDigits(int base, @NotNull Iterable<Integer> digits) { return fromBigEndianDigits(base, 
reverse(digits)); } public static @NotNull BigInteger fromDigits(@NotNull BigInteger base, @NotNull Iterable<BigInteger> digits) { return fromBigEndianDigits(base, (Iterable<BigInteger>) reverse(digits)); } public static @NotNull Pair<BigInteger, BigInteger> logarithmicDemux(@NotNull BigInteger n) { n = n.add(BigInteger.ONE); int exp = n.getLowestSetBit(); return new Pair<>(n.shiftRight(exp + 1), BigInteger.valueOf(exp)); } public static @NotNull Pair<BigInteger, BigInteger> squareRootDemux(@NotNull BigInteger n) { List<Boolean> bits = toList(bits(n)); Iterable<Boolean> aMask = cycle(Arrays.asList(true, false, false)); Iterable<Boolean> bMask = cycle(Arrays.asList(false, true, true)); return new Pair<>(fromBits(select(bMask, bits)), fromBits(select(aMask, bits))); } public static @NotNull List<BigInteger> demux(int lines, @NotNull BigInteger n) { if (n.equals(BigInteger.ZERO)) { return toList(replicate(lines, BigInteger.ZERO)); } return reverse(IterableUtils.map(MathUtils::fromBits, IterableUtils.demux(lines, bits(n)))); } public static boolean isAPowerOfTwo(@NotNull BigInteger n) { return n.getLowestSetBit() == n.bitLength() - 1; } public static @NotNull BigInteger fastGrowingCeilingInverse( @NotNull Function<BigInteger, BigInteger> f, @NotNull BigInteger y, @NotNull BigInteger min, @NotNull BigInteger max ) { for (BigInteger x : range(min, max)) { BigInteger j = f.apply(x); if (ge(j, y)) { return x; } } throw new IllegalArgumentException("inverse not found in range"); } public static @NotNull BigInteger ceilingLog(@NotNull BigInteger base, @NotNull BigInteger x) { return fastGrowingCeilingInverse( i -> base.pow(i.intValue()), x, BigInteger.ONE, x //very loose bound ); } public static @NotNull BigInteger ceilingInverse( @NotNull Function<BigInteger, BigInteger> f, @NotNull BigInteger y, @NotNull BigInteger min, @NotNull BigInteger max ) { while (true) { if (min.equals(max)) return max; BigInteger mid = min.add(max).shiftRight(1); BigInteger fMid = f.apply(mid); switch (compare(fMid, y)) { case GT: max = mid; break; case LT: min = mid.add(BigInteger.ONE); break; default: return mid; } } } public static @NotNull BigInteger ceilingRoot(@NotNull BigInteger r, @NotNull BigInteger x) { return ceilingInverse( i -> i.pow(r.intValue()), x, BigInteger.ZERO, x //very loose bound ); } public static void main(String[] args) { for (BigInteger i : range(BigInteger.ZERO, BigInteger.valueOf(1000))) { System.out.println(i + ": " + ceilingRoot(BigInteger.valueOf(3), i)); } } }
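A small self-contained round-trip check of the little-endian bits/fromBits convention documented above, using plain JDK types rather than the project's IterableUtils, so it illustrates the encoding rather than the class's actual helpers. Note that for n > 0 the little-endian bit list has floor(log2(n)) + 1 entries.

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Standalone illustration of the little-endian bit encoding used by bits()/fromBits().
final class BitsRoundTrip {
    static List<Boolean> bits(int n) {               // least-significant bit first
        List<Boolean> result = new ArrayList<>();
        for (int r = n; r != 0; r >>= 1) result.add((r & 1) == 1);
        return result;
    }

    static BigInteger fromBits(List<Boolean> bits) { // inverse of bits()
        List<Boolean> bigEndian = new ArrayList<>(bits);
        Collections.reverse(bigEndian);
        BigInteger n = BigInteger.ZERO;
        for (boolean bit : bigEndian) {
            n = n.shiftLeft(1);
            if (bit) n = n.add(BigInteger.ONE);
        }
        return n;
    }

    public static void main(String[] args) {
        int n = 100;
        System.out.println(bits(n));           // [false, false, true, false, false, true, true]
        System.out.println(fromBits(bits(n))); // 100
    }
}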
package mil.dds.anet.database; import java.util.List; import java.util.Map; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.skife.jdbi.v2.GeneratedKeys; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.Query; import mil.dds.anet.AnetObjectEngine; import mil.dds.anet.beans.Person; import mil.dds.anet.beans.Poam; import mil.dds.anet.beans.Position; import mil.dds.anet.beans.ApprovalAction.ApprovalType; import mil.dds.anet.beans.Report; import mil.dds.anet.beans.Report.ReportState; import mil.dds.anet.beans.ReportPerson; import mil.dds.anet.beans.search.ReportSearchQuery; import mil.dds.anet.database.mappers.PoamMapper; import mil.dds.anet.database.mappers.ReportMapper; import mil.dds.anet.database.mappers.ReportPersonMapper; import mil.dds.anet.utils.DaoUtils; public class ReportDao implements IAnetDao<Report> { private static String[] fields = { "id", "state", "createdAt", "updatedAt", "engagementDate", "locationId", "approvalStepId", "intent", "exsum", "atmosphere", "advisorOrganizationId", "principalOrganizationId", "atmosphereDetails", "text", "keyOutcomesSummary", "keyOutcomes", "nextStepsSummary", "nextSteps", "authorId"}; private static String tableName = "reports"; public static String REPORT_FIELDS = DaoUtils.buildFieldAliases(tableName, fields); Handle dbHandle; public ReportDao(Handle db) { this.dbHandle = db; } public List<Report> getAll(int pageNum, int pageSize) { String sql; if (DaoUtils.isMsSql(dbHandle)) { sql = "SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, people " + "WHERE reports.authorId = people.id " + "ORDER BY reports.createdAt DESC OFFSET :offset ROWS FETCH NEXT :limit ROWS ONLY"; } else { sql = "SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, people " + "WHERE reports.authorId = people.id " + "ORDER BY reports.createdAt DESC LIMIT :limit OFFSET :offset"; } Query<Report> query = dbHandle.createQuery(sql) .bind("limit", pageSize) .bind("offset", pageSize * pageNum) .map(new ReportMapper()); return query.list(); } public Report insert(Report r) { r.setCreatedAt(DateTime.now()); r.setUpdatedAt(r.getCreatedAt()); //MSSQL requires explicit CAST when a datetime2 might be NULL. 
StringBuilder sql = new StringBuilder("INSERT INTO reports " + "(state, createdAt, updatedAt, locationId, intent, exsum, " + "text, keyOutcomesSummary, keyOutcomes, nextStepsSummary, " + "nextSteps, authorId, engagementDate, atmosphere, " + "atmosphereDetails, advisorOrganizationId, " + "principalOrganizationId) VALUES " + "(:state, :createdAt, :updatedAt, :locationId, :intent, " + ":exsum, :reportText, :keyOutcomesSummary, :keyOutcomes, " + ":nextStepsSummary, :nextSteps, :authorId, "); if (DaoUtils.isMsSql(dbHandle)) { sql.append("CAST(:engagementDate AS datetime2), "); } else { sql.append(":engagementDate, "); } sql.append(":atmosphere, :atmosphereDetails, :advisorOrgId, :principalOrgId)"); GeneratedKeys<Map<String, Object>> keys = dbHandle.createStatement(sql.toString()) .bindFromProperties(r) .bind("state", DaoUtils.getEnumId(r.getState())) .bind("atmosphere", DaoUtils.getEnumId(r.getAtmosphere())) .bind("locationId", DaoUtils.getId(r.getLocation())) .bind("authorId", DaoUtils.getId(r.getAuthor())) .bind("advisorOrgId", DaoUtils.getId(r.getAdvisorOrg())) .bind("principalOrgId", DaoUtils.getId(r.getPrincipalOrg())) .executeAndReturnGeneratedKeys(); r.setId(DaoUtils.getGeneratedId(keys)); if (r.getAttendees() != null) { for (ReportPerson p : r.getAttendees()) { //TODO: batch this dbHandle.createStatement("INSERT INTO reportPeople " + "(personId, reportId, isPrimary) VALUES (:personId, :reportId, :isPrimary)") .bind("personId", p.getId()) .bind("reportId", r.getId()) .bind("isPrimary", p.isPrimary()) .execute(); } } if (r.getPoams() != null) { for (Poam p : r.getPoams()) { //TODO: batch this. dbHandle.createStatement("INSERT INTO reportPoams " + "(reportId, poamId) VALUES (:reportId, :poamId)") .bind("reportId", r.getId()) .bind("poamId", p.getId()) .execute(); } } return r; } public Report getById(int id) { Query<Report> query = dbHandle.createQuery("SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, people " + "WHERE reports.id = :id " + "AND reports.authorId = people.id") .bind("id", id) .map(new ReportMapper()); List<Report> results = query.list(); if (results.size() == 0) { return null; } Report r = results.get(0); return r; } public int update(Report r) { r.setUpdatedAt(DateTime.now()); StringBuilder sql = new StringBuilder("UPDATE reports SET " + "state = :state, updatedAt = :updatedAt, locationId = :locationId, " + "intent = :intent, exsum = :exsum, text = :reportText, " + "keyOutcomesSummary = :keyOutcomesSummary, keyOutcomes = :keyOutcomes, " + "nextStepsSummary = :nextStepsSummary, nextSteps = :nextSteps, " + "approvalStepId = :approvalStepId, "); if (DaoUtils.isMsSql(dbHandle)) { sql.append("engagementDate = CAST(:engagementDate AS datetime2), "); } else { sql.append("engagementDate = :engagementDate, "); } sql.append("atmosphere = :atmosphere, atmosphereDetails = :atmosphereDetails, " + "principalOrganizationId = :principalOrgId, advisorOrganizationId = :advisorOrgId " + "WHERE id = :id"); return dbHandle.createStatement(sql.toString()) .bindFromProperties(r) .bind("state", DaoUtils.getEnumId(r.getState())) .bind("locationId", DaoUtils.getId(r.getLocation())) .bind("authorId", DaoUtils.getId(r.getAuthor())) .bind("approvalStepId", DaoUtils.getId(r.getApprovalStep())) .bind("atmosphere", DaoUtils.getEnumId(r.getAtmosphere())) .bind("advisorOrgId", DaoUtils.getId(r.getAdvisorOrg())) .bind("principalOrgId", DaoUtils.getId(r.getPrincipalOrg())) .execute(); } public int addAttendeeToReport(ReportPerson rp, Report r) { return 
dbHandle.createStatement("INSERT INTO reportPeople " + "(personId, reportId, isPrimary) VALUES (:personId, :reportId, :isPrimary)") .bind("personId", rp.getId()) .bind("reportId", r.getId()) .bind("isPrimary", rp.isPrimary()) .execute(); } public int removeAttendeeFromReport(Person p, Report r) { return dbHandle.createStatement("DELETE FROM reportPeople WHERE reportId = :reportId AND personId = :personId") .bind("reportId", r.getId()) .bind("personId", p.getId()) .execute(); } public int updateAttendeeOnReport(ReportPerson rp, Report r) { return dbHandle.createStatement("UPDATE reportPeople SET isPrimary = :isPrimary WHERE reportId = :reportId AND personId = :personId") .bind("reportId", r.getId()) .bind("personId", rp.getId()) .bind("isPrimary", rp.isPrimary()) .execute(); } public int addPoamToReport(Poam p, Report r) { return dbHandle.createStatement("INSERT INTO reportPoams (poamId, reportId) VALUES (:poamId, :reportId)") .bind("reportId", r.getId()) .bind("poamId", p.getId()) .execute(); } public int removePoamFromReport(Poam p, Report r) { return dbHandle.createStatement("DELETE FROM reportPoams WHERE reportId = :reportId AND poamId = :poamId") .bind("reportId", r.getId()) .bind("poamId", p.getId()) .execute(); } /* Returns reports that the given person can currently approve */ public List<Report> getReportsForMyApproval(Person p) { return dbHandle.createQuery("SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, groupMemberships, approvalSteps, people " + "WHERE groupMemberships.personId = :personId " + "AND groupMemberships.groupId = approvalSteps.approverGroupId " + "AND approvalSteps.id = reports.approvalStepId " + "AND reports.authorId = people.id") .bind("personId", p.getId()) .map(new ReportMapper()) .list(); } public List<Report> getMyReportsPendingApproval(Person p) { return dbHandle.createQuery("SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, people " + "WHERE reports.authorId = :authorId " + "AND reports.state IN (:pending, :draft) " + "AND reports.authorId = people.id " + "ORDER BY reports.createdAt DESC") .bind("authorId", p.getId()) .bind("pending", ReportState.PENDING_APPROVAL.ordinal()) .bind("draft", ReportState.DRAFT.ordinal()) .map(new ReportMapper()) .list(); } public List<ReportPerson> getAttendeesForReport(int reportId) { return dbHandle.createQuery("SELECT " + PersonDao.PERSON_FIELDS + ", reportPeople.isPrimary FROM reportPeople " + "LEFT JOIN people ON reportPeople.personId = people.id " + "WHERE reportPeople.reportId = :reportId") .bind("reportId", reportId) .map(new ReportPersonMapper()) .list(); } public List<Poam> getPoamsForReport(Report report) { return dbHandle.createQuery("SELECT * FROM poams, reportPoams " + "WHERE reportPoams.reportId = :reportId " + "AND reportPoams.poamId = poams.id") .bind("reportId", report.getId()) .map(new PoamMapper()) .list(); } public List<Report> search(ReportSearchQuery query) { return AnetObjectEngine.getInstance().getSearcher().getReportSearcher() .runSearch(query, dbHandle); } public List<Report> getReportsByAuthorPosition(Position position) { return dbHandle.createQuery("SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM peoplePositions " + "INNER JOIN reports ON peoplePositions.personId = reports.authorId " + "LEFT JOIN people on reports.authorId = people.id " + "WHERE peoplePositions.positionId = :positionId " + "AND peoplePositions.personId IS NOT NULL " + "ORDER BY reports.engagementDate DESC") .bind("positionId", position.getId()) .map(new 
ReportMapper()) .list(); } public Object getReportsAboutThisPosition(Position position) { return dbHandle.createQuery("SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, peoplePositions, reportPeople, people " + "WHERE peoplePositions.positionId = :positionId " + "AND reportPeople.personId = peoplePositions.personId " + "AND reports.id = reportPeople.reportId " + "AND reports.authorId = people.id " + "ORDER BY reports.engagementDate DESC") .bind("positionId", position.getId()) .map(new ReportMapper()) .list(); } public List<Report> getReportsByAuthor(Person p, int pageNum, int pageSize) { String sql = "SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports " + "LEFT JOIN people ON reports.authorId = people.id " + "WHERE authorId = :personId " + "ORDER BY engagementDate DESC"; if (DaoUtils.isMsSql(dbHandle)) { sql += " OFFSET :offset ROWS FETCH NEXT :limit ROWS ONLY"; } else { sql += " LIMIT :limit OFFSET :offset"; } return dbHandle.createQuery(sql) .bind("personId", p.getId()) .bind("limit", pageSize) .bind("offset", pageNum * pageSize) .map(new ReportMapper()) .list(); } public List<Report> getReportsByAttendee(Person p, int pageNum, int pageSize) { String sql = "SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports " + "JOIN reportPeople ON reports.id = reportPeople.reportId " + "JOIN people ON reports.authorId = people.id " + "WHERE reportPeople.personId = :personId " + "ORDER BY engagementDate DESC"; if (DaoUtils.isMsSql(dbHandle)) { sql += " OFFSET :offset ROWS FETCH NEXT :limit ROWS ONLY"; } else { sql += " LIMIT :limit OFFSET :offset"; } return dbHandle.createQuery(sql) .bind("personId", p.getId()) .bind("limit", pageSize) .bind("offset", pageNum * pageSize) .map(new ReportMapper()) .list(); } DateTimeFormatter sqlitePattern = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"); public List<Report> getRecentReleased() { String sql = "SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports " + "JOIN approvalActions ON approvalActions.reportId = reports.id " + "JOIN people ON reports.authorId = people.id " + "WHERE approvalActions.type = :approvalType " + "AND reports.state = :reportState "; if (DaoUtils.isMsSql(dbHandle)) { sql += "AND approvalActions.createdAt > :startTime " + "AND reports.engagementDate > :twoWeeksAgo "; } else { sql += "AND approvalActions.createdAt > DateTime(:startTimeSqlite) " + "AND reports.engagementDate > DateTime(:twoWeeksAgoSqlite) "; } return dbHandle.createQuery(sql) .bind("approvalType", DaoUtils.getEnumId(ApprovalType.APPROVE)) .bind("reportState", DaoUtils.getEnumId(ReportState.RELEASED)) .bind("startTime", DateTime.now().minusDays(1)) .bind("twoWeeksAgo", DateTime.now().minusDays(14)) .bind("startTimeSqlite", sqlitePattern.print(DateTime.now().minusDays(1))) .bind("twoWeeksAgoSqlite", sqlitePattern.print(DateTime.now().minusDays(14))) .map(new ReportMapper()) .list(); } }
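// The DAO above branches on DaoUtils.isMsSql(dbHandle) because SQL Server and SQLite paginate
// differently (OFFSET ... FETCH NEXT vs. LIMIT ... OFFSET). The class below is a minimal,
// self-contained sketch of that dialect-switching pattern only; "PaginationSqlSketch" and its
// method are hypothetical illustrations, not part of the DAO itself.
final class PaginationSqlSketch {

    // Appends a dialect-appropriate pagination clause; :limit and :offset are named parameters
    // the caller binds, mirroring how the DAO binds pageSize and pageNum * pageSize.
    static String paginate(String baseSql, boolean isMsSql) {
        return isMsSql
                ? baseSql + " OFFSET :offset ROWS FETCH NEXT :limit ROWS ONLY"  // SQL Server form; baseSql must end with ORDER BY
                : baseSql + " LIMIT :limit OFFSET :offset";                     // SQLite form
    }

    public static void main(String[] args) {
        String base = "SELECT * FROM reports ORDER BY engagementDate DESC";
        System.out.println(paginate(base, true));
        System.out.println(paginate(base, false));
    }
}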
package mil.dds.anet.database; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response.Status; import org.joda.time.DateTime; import org.skife.jdbi.v2.GeneratedKeys; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.Query; import org.skife.jdbi.v2.TransactionCallback; import org.skife.jdbi.v2.TransactionStatus; import org.skife.jdbi.v2.sqlobject.Bind; import org.skife.jdbi.v2.sqlobject.BindBean; import org.skife.jdbi.v2.sqlobject.SqlBatch; import com.google.common.base.Joiner; import mil.dds.anet.AnetObjectEngine; import mil.dds.anet.beans.Organization; import mil.dds.anet.beans.Organization.OrganizationType; import mil.dds.anet.beans.Person; import mil.dds.anet.beans.Poam; import mil.dds.anet.beans.Position; import mil.dds.anet.beans.Report; import mil.dds.anet.beans.Report.ReportState; import mil.dds.anet.beans.ReportPerson; import mil.dds.anet.beans.RollupGraph; import mil.dds.anet.beans.Tag; import mil.dds.anet.beans.lists.AbstractAnetBeanList.ReportList; import mil.dds.anet.beans.search.OrganizationSearchQuery; import mil.dds.anet.beans.search.ReportSearchQuery; import mil.dds.anet.database.AdminDao.AdminSettingKeys; import mil.dds.anet.database.mappers.PoamMapper; import mil.dds.anet.database.mappers.ReportMapper; import mil.dds.anet.database.mappers.ReportPersonMapper; import mil.dds.anet.database.mappers.TagMapper; import mil.dds.anet.search.sqlite.SqliteReportSearcher; import mil.dds.anet.utils.DaoUtils; import mil.dds.anet.utils.Utils; public class ReportDao implements IAnetDao<Report> { private static final String[] fields = { "id", "state", "createdAt", "updatedAt", "engagementDate", "locationId", "approvalStepId", "intent", "exsum", "atmosphere", "cancelledReason", "advisorOrganizationId", "principalOrganizationId", "releasedAt", "atmosphereDetails", "text", "keyOutcomes", "nextSteps", "authorId"}; private static final String tableName = "reports"; public static final String REPORT_FIELDS = DaoUtils.buildFieldAliases(tableName, fields); Handle dbHandle; public ReportDao(Handle db) { this.dbHandle = db; } @Override public ReportList getAll(int pageNum, int pageSize) { // Return the reports without sensitive information return getAll(pageNum, pageSize, null); } public ReportList getAll(int pageNum, int pageSize, Person user) { String sql; if (DaoUtils.isMsSql(dbHandle)) { sql = "/* getAllReports */ SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + ", COUNT(*) OVER() AS totalCount FROM reports, people " + "WHERE reports.authorId = people.id " + "ORDER BY reports.createdAt DESC OFFSET :offset ROWS FETCH NEXT :limit ROWS ONLY"; } else { sql = "/* getAllReports */ SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, people " + "WHERE reports.authorId = people.id " + "ORDER BY reports.createdAt DESC LIMIT :limit OFFSET :offset"; } Query<Report> query = dbHandle.createQuery(sql) .bind("limit", pageSize) .bind("offset", pageSize * pageNum) .map(new ReportMapper()); return ReportList.fromQuery(user, query, pageNum, pageSize); } @Override public Report insert(Report r) { // Create a report without sensitive information return insert(r, null); } public Report insert(Report r, Person user) { return dbHandle.inTransaction(new TransactionCallback<Report>() { @Override public Report inTransaction(Handle conn, TransactionStatus status) throws Exception { 
r.setCreatedAt(DateTime.now()); r.setUpdatedAt(r.getCreatedAt()); //MSSQL requires explicit CAST when a datetime2 might be NULL. StringBuilder sql = new StringBuilder("/* insertReport */ INSERT INTO reports " + "(state, createdAt, updatedAt, locationId, intent, exsum, " + "text, keyOutcomes, nextSteps, authorId, " + "engagementDate, releasedAt, atmosphere, cancelledReason, " + "atmosphereDetails, advisorOrganizationId, " + "principalOrganizationId) VALUES " + "(:state, :createdAt, :updatedAt, :locationId, :intent, " + ":exsum, :reportText, :keyOutcomes, " + ":nextSteps, :authorId, "); if (DaoUtils.isMsSql(dbHandle)) { sql.append("CAST(:engagementDate AS datetime2), CAST(:releasedAt AS datetime2), "); } else { sql.append(":engagementDate, :releasedAt, "); } sql.append(":atmosphere, :cancelledReason, :atmosphereDetails, :advisorOrgId, :principalOrgId)"); GeneratedKeys<Map<String, Object>> keys = dbHandle.createStatement(sql.toString()) .bindFromProperties(r) .bind("state", DaoUtils.getEnumId(r.getState())) .bind("atmosphere", DaoUtils.getEnumId(r.getAtmosphere())) .bind("cancelledReason", DaoUtils.getEnumId(r.getCancelledReason())) .bind("locationId", DaoUtils.getId(r.getLocation())) .bind("authorId", DaoUtils.getId(r.getAuthor())) .bind("advisorOrgId", DaoUtils.getId(r.getAdvisorOrg())) .bind("principalOrgId", DaoUtils.getId(r.getPrincipalOrg())) .executeAndReturnGeneratedKeys(); r.setId(DaoUtils.getGeneratedId(keys)); // Write sensitive information (if allowed) AnetObjectEngine.getInstance().getReportSensitiveInformationDao().insertOrUpdate(r.getReportSensitiveInformation(), user, r); final ReportBatch rb = dbHandle.attach(ReportBatch.class); if (r.getAttendees() != null) { //Setify based on attendeeId to prevent violations of unique key constraint. 
Map<Integer,ReportPerson> attendeeMap = new HashMap<Integer,ReportPerson>(); r.getAttendees().stream().forEach(rp -> attendeeMap.put(rp.getId(), rp)); rb.insertReportAttendees(r.getId(), new ArrayList<ReportPerson>(attendeeMap.values())); } if (r.getPoams() != null) { rb.insertReportPoams(r.getId(), r.getPoams()); } if (r.getTags() != null) { rb.insertReportTags(r.getId(), r.getTags()); } return r; } }); } public interface ReportBatch { @SqlBatch("INSERT INTO reportPeople (reportId, personId, isPrimary) VALUES (:reportId, :id, :primary)") void insertReportAttendees(@Bind("reportId") Integer reportId, @BindBean List<ReportPerson> reportPeople); @SqlBatch("INSERT INTO reportPoams (reportId, poamId) VALUES (:reportId, :id)") void insertReportPoams(@Bind("reportId") Integer reportId, @BindBean List<Poam> poams); @SqlBatch("INSERT INTO reportTags (reportId, tagId) VALUES (:reportId, :id)") void insertReportTags(@Bind("reportId") Integer reportId, @BindBean List<Tag> tags); } @Override public Report getById(int id) { // Return the report without sensitive information return getById(id, null); } public Report getById(int id, Person user) { Query<Report> query = dbHandle.createQuery("/* getReportById */ SELECT " + REPORT_FIELDS + ", " + PersonDao.PERSON_FIELDS + "FROM reports, people " + "WHERE reports.id = :id " + "AND reports.authorId = people.id") .bind("id", id) .map(new ReportMapper()); List<Report> results = query.list(); if (results.size() == 0) { return null; } Report r = results.get(0); r.setUser(user); return r; } @Override public int update(Report r) { // Update the report without sensitive information return update(r, null); } public int update(Report r, Person user) { return dbHandle.inTransaction(new TransactionCallback<Integer>() { @Override public Integer inTransaction(Handle conn, TransactionStatus status) throws Exception { // Write sensitive information (if allowed) AnetObjectEngine.getInstance().getReportSensitiveInformationDao().insertOrUpdate(r.getReportSensitiveInformation(), user, r); r.setUpdatedAt(DateTime.now()); StringBuilder sql = new StringBuilder("/* updateReport */ UPDATE reports SET " + "state = :state, updatedAt = :updatedAt, locationId = :locationId, " + "intent = :intent, exsum = :exsum, text = :reportText, " + "keyOutcomes = :keyOutcomes, nextSteps = :nextSteps, " + "approvalStepId = :approvalStepId, "); if (DaoUtils.isMsSql(dbHandle)) { sql.append("engagementDate = CAST(:engagementDate AS datetime2), releasedAt = CAST(:releasedAt AS datetime2), "); } else { sql.append("engagementDate = :engagementDate, releasedAt = :releasedAt, "); } sql.append("atmosphere = :atmosphere, atmosphereDetails = :atmosphereDetails, " + "cancelledReason = :cancelledReason, " + "principalOrganizationId = :principalOrgId, advisorOrganizationId = :advisorOrgId " + "WHERE id = :id"); return dbHandle.createStatement(sql.toString()) .bindFromProperties(r) .bind("state", DaoUtils.getEnumId(r.getState())) .bind("locationId", DaoUtils.getId(r.getLocation())) .bind("authorId", DaoUtils.getId(r.getAuthor())) .bind("approvalStepId", DaoUtils.getId(r.getApprovalStep())) .bind("atmosphere", DaoUtils.getEnumId(r.getAtmosphere())) .bind("cancelledReason", DaoUtils.getEnumId(r.getCancelledReason())) .bind("advisorOrgId", DaoUtils.getId(r.getAdvisorOrg())) .bind("principalOrgId", DaoUtils.getId(r.getPrincipalOrg())) .execute(); } }); } public int addAttendeeToReport(ReportPerson rp, Report r) { return dbHandle.createStatement("/* addReportAttendee */ INSERT INTO reportPeople " + "(personId, reportId, 
isPrimary) VALUES (:personId, :reportId, :isPrimary)") .bind("personId", rp.getId()) .bind("reportId", r.getId()) .bind("isPrimary", rp.isPrimary()) .execute(); } public int removeAttendeeFromReport(Person p, Report r) { return dbHandle.createStatement("/* deleteReportAttendee */ DELETE FROM reportPeople " + "WHERE reportId = :reportId AND personId = :personId") .bind("reportId", r.getId()) .bind("personId", p.getId()) .execute(); } public int updateAttendeeOnReport(ReportPerson rp, Report r) { return dbHandle.createStatement("/* updateAttendeeOnReport*/ UPDATE reportPeople " + "SET isPrimary = :isPrimary WHERE reportId = :reportId AND personId = :personId") .bind("reportId", r.getId()) .bind("personId", rp.getId()) .bind("isPrimary", rp.isPrimary()) .execute(); } public int addPoamToReport(Poam p, Report r) { return dbHandle.createStatement("/* addPoamToReport */ INSERT INTO reportPoams (poamId, reportId) " + "VALUES (:poamId, :reportId)") .bind("reportId", r.getId()) .bind("poamId", p.getId()) .execute(); } public int removePoamFromReport(Poam p, Report r) { return dbHandle.createStatement("/* removePoamFromReport*/ DELETE FROM reportPoams " + "WHERE reportId = :reportId AND poamId = :poamId") .bind("reportId", r.getId()) .bind("poamId", p.getId()) .execute(); } public int addTagToReport(Tag t, Report r) { return dbHandle.createStatement("/* addTagToReport */ INSERT INTO reportTags (reportId, tagId) " + "VALUES (:reportId, :tagId)") .bind("reportId", r.getId()) .bind("tagId", t.getId()) .execute(); } public int removeTagFromReport(Tag t, Report r) { return dbHandle.createStatement("/* removeTagFromReport */ DELETE FROM reportTags " + "WHERE reportId = :reportId AND tagId = :tagId") .bind("reportId", r.getId()) .bind("tagId", t.getId()) .execute(); } public List<ReportPerson> getAttendeesForReport(int reportId) { return dbHandle.createQuery("/* getAttendeesForReport */ SELECT " + PersonDao.PERSON_FIELDS + ", reportPeople.isPrimary FROM reportPeople " + "LEFT JOIN people ON reportPeople.personId = people.id " + "WHERE reportPeople.reportId = :reportId") .bind("reportId", reportId) .map(new ReportPersonMapper()) .list(); } public List<Poam> getPoamsForReport(Report report) { return dbHandle.createQuery("/* getPoamsForReport */ SELECT * FROM poams, reportPoams " + "WHERE reportPoams.reportId = :reportId " + "AND reportPoams.poamId = poams.id") .bind("reportId", report.getId()) .map(new PoamMapper()) .list(); } public List<Tag> getTagsForReport(int reportId) { return dbHandle.createQuery("/* getTagsForReport */ SELECT * FROM reportTags " + "INNER JOIN tags ON reportTags.tagId = tags.id " + "WHERE reportTags.reportId = :reportId " + "ORDER BY tags.name") .bind("reportId", reportId) .map(new TagMapper()) .list(); } //Does an unauthenticated search. This will never return any DRAFT or REJECTED reports public ReportList search(ReportSearchQuery query) { return search(query, null); } public ReportList search(ReportSearchQuery query, Person user) { return AnetObjectEngine.getInstance().getSearcher().getReportSearcher() .runSearch(query, dbHandle, user); } /* * Deletes a given report from the database. * Ensures consistency by removing all references to a report before deleting a report. 
*/ public void deleteReport(final Report report) { dbHandle.inTransaction(new TransactionCallback<Void>() { public Void inTransaction(Handle conn, TransactionStatus status) throws Exception { // Delete tags dbHandle.execute("/* deleteReport.tags */ DELETE FROM reportTags where reportId = ?", report.getId()); //Delete poams dbHandle.execute("/* deleteReport.poams */ DELETE FROM reportPoams where reportId = ?", report.getId()); //Delete attendees dbHandle.execute("/* deleteReport.attendees */ DELETE FROM reportPeople where reportId = ?", report.getId()); //Delete comments dbHandle.execute("/* deleteReport.comments */ DELETE FROM comments where reportId = ?", report.getId()); //Delete approvalActions dbHandle.execute("/* deleteReport.actions */ DELETE FROM approvalActions where reportId = ?", report.getId()); //Delete report dbHandle.execute("/* deleteReport.report */ DELETE FROM reports where id = ?", report.getId()); return null; } }); } private DateTime getRollupEngagmentStart(DateTime start) { String maxReportAgeStr = AnetObjectEngine.getInstance().getAdminSetting(AdminSettingKeys.DAILY_ROLLUP_MAX_REPORT_AGE_DAYS); if (maxReportAgeStr == null) { throw new WebApplicationException("Missing Admin Setting for " + AdminSettingKeys.DAILY_ROLLUP_MAX_REPORT_AGE_DAYS); } Integer maxReportAge = Integer.parseInt(maxReportAgeStr); return start.minusDays(maxReportAge); } /* Generates the Rollup Graph for a particular Organization Type, starting at the root of the org hierarchy */ public List<RollupGraph> getDailyRollupGraph(DateTime start, DateTime end, OrganizationType orgType, Map<Integer, Organization> nonReportingOrgs) { List<Map<String, Object>> results = rollupQuery(start, end, orgType, null, false); Map<Integer,Organization> orgMap = AnetObjectEngine.getInstance().buildTopLevelOrgHash(orgType); return generateRollupGraphFromResults(results, orgMap, nonReportingOrgs); } /* Generates a Rollup graph for a particular organization. Starting with a given parent Organization */ public List<RollupGraph> getDailyRollupGraph(DateTime start, DateTime end, Integer parentOrgId, OrganizationType orgType, Map<Integer, Organization> nonReportingOrgs) { List<Organization> orgList = null; Map<Integer,Organization> orgMap; if (parentOrgId.equals(-1) == false) { // -1 is code for no parent org. //doing this as two separate queries because I do need all the information about the organizations OrganizationSearchQuery query = new OrganizationSearchQuery(); query.setParentOrgId(parentOrgId); query.setParentOrgRecursively(true); query.setPageSize(Integer.MAX_VALUE); orgList = AnetObjectEngine.getInstance().getOrganizationDao().search(query).getList(); Optional<Organization> parentOrg = orgList.stream().filter(o -> o.getId().equals(parentOrgId)).findFirst(); if (parentOrg.isPresent() == false) { throw new WebApplicationException("No such organization with id " + parentOrgId, Status.NOT_FOUND); } orgMap = Utils.buildParentOrgMapping(orgList, parentOrgId); } else { orgMap = new HashMap<Integer, Organization>(); //guaranteed to match no orgs! 
} List<Map<String,Object>> results = rollupQuery(start, end, orgType, orgList, parentOrgId.equals(-1)); return generateRollupGraphFromResults(results, orgMap, nonReportingOrgs); } /* Generates Advisor Report Insights for Organizations */ public List<Map<String,Object>> getAdvisorReportInsights(DateTime start, DateTime end, int orgId) { final Map<String,Object> sqlArgs = new HashMap<String,Object>(); StringBuilder sql = new StringBuilder(); sql.append("/* AdvisorReportInsightsQuery */"); sql.append("SELECT "); sql.append("CASE WHEN a.organizationId IS NULL THEN b.organizationId ELSE a.organizationId END AS organizationId,"); sql.append("CASE WHEN a.organizationShortName IS NULL THEN b.organizationShortName ELSE a.organizationShortName END AS organizationShortName,"); sql.append("%1$s"); sql.append("%2$s"); sql.append("CASE WHEN a.week IS NULL THEN b.week ELSE a.week END AS week,"); sql.append("CASE WHEN a.nrReportsSubmitted IS NULL THEN 0 ELSE a.nrReportsSubmitted END AS nrReportsSubmitted,"); sql.append("CASE WHEN b.nrEngagementsAttended IS NULL THEN 0 ELSE b.nrEngagementsAttended END AS nrEngagementsAttended"); sql.append(" FROM ("); sql.append("SELECT "); sql.append("organizations.id AS organizationId,"); sql.append("organizations.shortName AS organizationShortName,"); sql.append("%3$s"); sql.append("%4$s"); sql.append("DATEPART(week, reports.createdAt) AS week,"); sql.append("COUNT(reports.authorId) AS nrReportsSubmitted"); sql.append(" FROM "); sql.append("positions,"); sql.append("reports,"); sql.append("%5$s"); sql.append("organizations"); sql.append(" WHERE positions.currentPersonId = reports.authorId"); sql.append(" %6$s"); sql.append(" AND reports.advisorOrganizationId = organizations.id"); sql.append(" AND positions.type = :positionAdvisor"); sql.append(" AND reports.state IN ( :reportReleased, :reportPending, :reportDraft )"); sql.append(" AND reports.createdAt BETWEEN :startDate and :endDate"); sql.append(" %11$s"); sql.append(" GROUP BY "); sql.append("organizations.id,"); sql.append("organizations.shortName,"); sql.append("%7$s"); sql.append("%8$s"); sql.append("DATEPART(week, reports.createdAt)"); sql.append(") a"); sql.append(" FULL OUTER JOIN ("); sql.append("SELECT "); sql.append("organizations.id AS organizationId,"); sql.append("organizations.shortName AS organizationShortName,"); sql.append("%3$s"); sql.append("%4$s"); sql.append("DATEPART(week, reports.engagementDate) AS week,"); sql.append("COUNT(reportPeople.personId) AS nrEngagementsAttended"); sql.append(" FROM "); sql.append("positions,"); sql.append("%5$s"); sql.append("reports,"); sql.append("reportPeople,"); sql.append("organizations"); sql.append(" WHERE positions.currentPersonId = reportPeople.personId"); sql.append(" %6$s"); sql.append(" AND reportPeople.reportId = reports.id"); sql.append(" AND reports.advisorOrganizationId = organizations.id"); sql.append(" AND positions.type = :positionAdvisor"); sql.append(" AND reports.state IN ( :reportReleased, :reportPending, :reportDraft )"); sql.append(" AND reports.engagementDate BETWEEN :startDate and :endDate"); sql.append(" %11$s"); sql.append(" GROUP BY "); sql.append("organizations.id,"); sql.append("organizations.shortName,"); sql.append("%7$s"); sql.append("%8$s"); sql.append("DATEPART(week, reports.engagementDate)"); sql.append(") b"); sql.append(" ON "); sql.append(" a.organizationId = b.organizationId"); sql.append(" %9$s"); sql.append(" AND a.week = b.week"); sql.append(" ORDER BY "); sql.append("organizationShortName,"); sql.append("%10$s"); 
sql.append("week;"); final Object[] fmtArgs; if (orgId > -1) { String selectOrg = " AND organizations.id = " + orgId; fmtArgs = new String[] { "CASE WHEN a.personId IS NULL THEN b.personId ELSE a.personId END AS personId,", "CASE WHEN a.name IS NULL THEN b.name ELSE a.name END AS name,", "people.id AS personId,", "people.name AS name,", "people,", "AND positions.currentPersonId = people.id", "people.id,", "people.name,", "AND a.personId = b.personId", "name,", selectOrg}; } else { fmtArgs = new String[] { "", "", "", "", "", "", "", "", "", "", ""}; } sqlArgs.put("startDate", start); sqlArgs.put("endDate", end); sqlArgs.put("positionAdvisor", Position.PositionType.ADVISOR.ordinal()); sqlArgs.put("reportDraft", ReportState.DRAFT.ordinal()); sqlArgs.put("reportPending", ReportState.PENDING_APPROVAL.ordinal()); sqlArgs.put("reportReleased", ReportState.RELEASED.ordinal()); return dbHandle.createQuery(String.format(sql.toString(), fmtArgs)) .bindFromMap(sqlArgs) .list(); } /** Helper method that builds and executes the daily rollup query * Handles both MsSql and Sqlite * Searching for just all reports and for reports in certain organizations. * @param orgType: the type of organization Id to be lookinf ro * @param orgs: the list of orgs for whose reports to find, null means all * @param missingOrgReports: true if we want to look for reports specifically with NULL org Ids. */ private List<Map<String,Object>> rollupQuery(DateTime start, DateTime end, OrganizationType orgType, List<Organization> orgs, boolean missingOrgReports) { String orgColumn = orgType == OrganizationType.ADVISOR_ORG ? "advisorOrganizationId" : "principalOrganizationId"; Map<String,Object> sqlArgs = new HashMap<String,Object>(); StringBuilder sql = new StringBuilder(); sql.append("/* RollupQuery */ SELECT " + orgColumn + " as orgId, state, count(*) AS count "); sql.append("FROM reports WHERE "); if (DaoUtils.isMsSql(dbHandle)) { sql.append("releasedAt >= :startDate and releasedAt <= :endDate " + "AND engagementDate > :engagementDateStart "); sqlArgs.put("startDate", start); sqlArgs.put("endDate", end); sqlArgs.put("engagementDateStart", getRollupEngagmentStart(start)); } else { sql.append("releasedAt >= DateTime(:startDate) AND releasedAt <= DateTime(:endDate) " + "AND engagementDate > DateTime(:engagementDateStart) "); sqlArgs.put("startDate", SqliteReportSearcher.sqlitePattern.print(start)); sqlArgs.put("endDate", SqliteReportSearcher.sqlitePattern.print(end)); sqlArgs.put("engagementDateStart", SqliteReportSearcher.sqlitePattern.print(getRollupEngagmentStart(start))); } if (orgs != null) { List<String> sqlBind = new LinkedList<String>(); int orgNum = 0; for (Organization o : orgs) { sqlArgs.put("orgId" + orgNum, o.getId()); sqlBind.add(":orgId" + orgNum); orgNum++; } String orgInSql = Joiner.on(',').join(sqlBind); sql.append("AND " + orgColumn + " IN (" + orgInSql + ") "); } else if (missingOrgReports) { sql.append(" AND " + orgColumn + " IS NULL "); } sql.append("GROUP BY " + orgColumn + ", state"); return dbHandle.createQuery(sql.toString()) .bindFromMap(sqlArgs) .list(); } /* Given the results from the database on the number of reports grouped by organization * And the map of each organization to the organization that their reports roll up to * this method returns the final rollup graph information. 
*/ private List<RollupGraph> generateRollupGraphFromResults(List<Map<String,Object>> dbResults, Map<Integer, Organization> orgMap, Map<Integer, Organization> nonReportingOrgs) { Map<Integer,Map<ReportState,Integer>> rollup = new HashMap<Integer,Map<ReportState,Integer>>(); for (Map<String,Object> result : dbResults) { Integer orgId = (Integer) result.get("orgId"); if (nonReportingOrgs.containsKey(orgId)) { // Skip non-reporting organizations continue; } Integer count = (Integer) result.get("count"); ReportState state = ReportState.values()[(Integer) result.get("state")]; Integer parentOrgId = DaoUtils.getId(orgMap.get(orgId)); Map<ReportState,Integer> orgBar = rollup.get(parentOrgId); if (orgBar == null) { orgBar = new HashMap<ReportState,Integer>(); rollup.put(parentOrgId, orgBar); } orgBar.put(state, Utils.orIfNull(orgBar.get(state), 0) + count); } // Add all (top-level) organizations without any reports for (final Map.Entry<Integer, Organization> entry : orgMap.entrySet()) { final Integer orgId = entry.getKey(); if (nonReportingOrgs.containsKey(orgId)) { // Skip non-reporting organizations continue; } final Integer parentOrgId = DaoUtils.getId(orgMap.get(orgId)); if (!rollup.keySet().contains(parentOrgId)) { final Map<ReportState, Integer> orgBar = new HashMap<ReportState, Integer>(); orgBar.put(ReportState.RELEASED, 0); orgBar.put(ReportState.CANCELLED, 0); rollup.put(parentOrgId, orgBar); } } List<RollupGraph> result = new LinkedList<RollupGraph>(); for (Map.Entry<Integer, Map<ReportState,Integer>> entry : rollup.entrySet()) { Map<ReportState,Integer> values = entry.getValue(); RollupGraph bar = new RollupGraph(); bar.setOrg(orgMap.get(entry.getKey())); bar.setReleased(Utils.orIfNull(values.get(ReportState.RELEASED), 0)); bar.setCancelled(Utils.orIfNull(values.get(ReportState.CANCELLED), 0)); result.add(bar); } return result; } }
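// generateRollupGraphFromResults() above folds per-organization (orgId, state, count) rows into
// totals keyed by each organization's top-level parent. The snippet below is a minimal,
// self-contained model of just that grouping step, using plain ints and a hand-built parent map
// as assumed stand-ins for what buildTopLevelOrgHash()/rollupQuery() would supply; it is an
// illustration, not ANET code.
import java.util.HashMap;
import java.util.Map;

final class RollupFoldSketch {
    public static void main(String[] args) {
        // orgId -> top-level parent orgId
        Map<Integer, Integer> parentOf = new HashMap<>();
        parentOf.put(10, 1);
        parentOf.put(11, 1);
        parentOf.put(20, 2);

        // rows as rollupQuery() would return them, here only RELEASED counts: {orgId, count}
        int[][] rows = { {10, 3}, {11, 2}, {20, 5} };

        // fold each org's count into its parent's total
        Map<Integer, Integer> releasedByParent = new HashMap<>();
        for (int[] row : rows) {
            releasedByParent.merge(parentOf.get(row[0]), row[1], Integer::sum);
        }

        System.out.println(releasedByParent); // {1=5, 2=5}
    }
}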
package net.imagej.legacy; import java.awt.GraphicsEnvironment; import java.io.File; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Future; import net.imagej.DatasetService; import net.imagej.ImageJService; import net.imagej.display.ImageDisplay; import net.imagej.display.ImageDisplayService; import net.imagej.display.OverlayService; import net.imagej.legacy.plugin.LegacyCommand; import net.imagej.legacy.ui.LegacyUI; import net.imagej.patcher.LegacyEnvironment; import net.imagej.patcher.LegacyInjector; import net.imagej.threshold.ThresholdService; import net.imagej.ui.viewer.image.ImageDisplayViewer; import org.scijava.Identifiable; import org.scijava.MenuPath; import org.scijava.Priority; import org.scijava.Versioned; import org.scijava.app.App; import org.scijava.app.AppService; import org.scijava.app.StatusService; import org.scijava.command.Command; import org.scijava.command.CommandInfo; import org.scijava.command.CommandService; import org.scijava.display.DisplayService; import org.scijava.display.event.DisplayActivatedEvent; import org.scijava.display.event.input.KyPressedEvent; import org.scijava.display.event.input.KyReleasedEvent; import org.scijava.event.EventHandler; import org.scijava.event.EventService; import org.scijava.input.Accelerator; import org.scijava.input.KeyCode; import org.scijava.log.LogService; import org.scijava.menu.MenuService; import org.scijava.module.ModuleInfo; import org.scijava.module.ModuleService; import org.scijava.options.OptionsService; import org.scijava.plugin.Parameter; import org.scijava.plugin.Plugin; import org.scijava.plugin.PluginService; import org.scijava.script.ScriptInfo; import org.scijava.script.ScriptService; import org.scijava.service.AbstractService; import org.scijava.service.Service; import org.scijava.ui.ApplicationFrame; import org.scijava.ui.UIService; import org.scijava.ui.UserInterface; import org.scijava.ui.swing.script.TextEditor; import org.scijava.ui.viewer.DisplayWindow; import org.scijava.util.AppUtils; /** * Service for working with legacy ImageJ 1.x. * <p> * The legacy service overrides the behavior of various legacy ImageJ methods, * inserting seams so that (e.g.) the modern UI is aware of legacy ImageJ events * as they occur. * </p> * <p> * It also maintains an image map between legacy ImageJ {@link ij.ImagePlus} * objects and modern ImageJ {@link ImageDisplay}s. * </p> * <p> * In this fashion, when a legacy command is executed on a {@link ImageDisplay}, * the service transparently translates it into an {@link ij.ImagePlus}, and * vice versa, enabling backward compatibility with legacy commands. * </p> * * @author Barry DeZonia * @author Curtis Rueden * @author Johannes Schindelin * @author Mark Hiner */ @Plugin(type = Service.class, priority = Priority.NORMAL_PRIORITY + 1) public final class LegacyService extends AbstractService implements ImageJService, Versioned { /** * Static reference to the one and only active {@link LegacyService}. The JVM * can only have one instance of ImageJ 1.x, and hence one LegacyService, * active at a time. */ private static LegacyService instance; private static Throwable instantiationStackTrace; static { // NB: Prime ImageJ 1.x for patching. // This will only work if this class does _not_ need to load any ij.* // classes for it itself to be loaded. I.e.: this class must have _no_ // references to ij.* classes in its API (supertypes, fields, method // arguments and method return types). 
LegacyInjector.preinit(); } @Parameter private LogService log; @Parameter private CommandService commandService; @Parameter private OptionsService optionsService; @Parameter private ImageDisplayService imageDisplayService; @Parameter private ModuleService moduleService; @Parameter private ScriptService scriptService; @Parameter private StatusService statusService; @Parameter(required = false) private AppService appService; // FIXME: Why isn't this service declared as an optional parameter? private UIService uiService; // NB: Unused services, declared only to affect service initialization order. @Parameter(required = false) private DatasetService datasetService; @Parameter(required = false) private DisplayService displayService; @Parameter(required = false) private EventService eventService; @Parameter(required = false) private MenuService menuService; @Parameter(required = false) private OverlayService overlayService; @Parameter(required = false) private PluginService pluginService; @Parameter(required = false) private ThresholdService thresholdService; /** Mapping between modern and legacy image data structures. */ private LegacyImageMap imageMap; /** * A buffer object which keeps all references to ImageJ 1.x separated from * this class. */ private IJ1Helper ij1Helper; private final ThreadLocal<Boolean> isProcessingEvents = new ThreadLocal<Boolean>(); /** * Map of ImageJ2 {@link Command}s which are compatible with the legacy user * interface. A command is considered compatible if it is not tagged with the * {@code "no-legacy"} key in its {@link Parameter#attrs()} list. The map is * keyed on identifier; see the {@link Identifiable} interface. */ private final Map<String, ModuleInfo> legacyCompatible = new HashMap<String, ModuleInfo>(); // -- LegacyService methods -- /** Gets the LogService associated with this LegacyService. */ public LogService log() { return log; } /** Gets the StatusService associated with this LegacyService. */ public StatusService status() { return statusService; } public synchronized UIService uiService() { if (uiService == null) uiService = getContext().getService(UIService.class); return uiService; } /** * Gets the helper class responsible for direct interfacing with ImageJ1. * Ideally, all accesses to {@code ij.*} classes should be done through this * helper class, to avoid class loader woes. */ public IJ1Helper getIJ1Helper() { return ij1Helper; } /** Gets the LegacyImageMap associated with this LegacyService. */ public synchronized LegacyImageMap getImageMap() { if (imageMap == null) imageMap = new LegacyImageMap(this); return imageMap; } /** * Runs a legacy command programmatically. * * @param ij1ClassName The name of the plugin class you want to run e.g. * "ij.plugin.Clipboard" * @param argument The argument string to pass to the plugin e.g. "copy" */ public void runLegacyCommand(final String ij1ClassName, final String argument) { final String arg = argument == null ? "" : argument; final Map<String, Object> inputMap = new HashMap<String, Object>(); inputMap.put("className", ij1ClassName); inputMap.put("arg", arg); commandService.run(LegacyCommand.class, true, inputMap); } /** * Runs the legacy compatible command with the given identifier. * * @param key The identifier of the command to execute. * @return The {@link Future} of the command execution; or if the identifier * describes a script and the shift key is down, then the * {@link TextEditor} of the new Script Editor window which was * opened. 
* @see Identifiable */ public Object runLegacyCompatibleCommand(final String key) { final ModuleInfo info = legacyCompatible.get(key); if (info == null) return null; if (info instanceof CommandInfo) try { return commandService.run((CommandInfo) info, true).get(); } catch (final Exception e) { if (e instanceof RuntimeException) throw (RuntimeException) e; throw new RuntimeException(e); } if (info instanceof ScriptInfo) { if (ij1Helper.shiftKeyDown()) { final ScriptInfo script = (ScriptInfo) info; final TextEditor editor = new TextEditor(getContext()); editor.open(new File(script.getPath())); editor.setVisible(true); return editor; } try { return scriptService.run((ScriptInfo) info, true).get(); } catch (final Exception e) { if (e instanceof RuntimeException) throw (RuntimeException) e; throw new RuntimeException(e); } } throw new IllegalArgumentException("Unhandled info for '" + key + "': " + info); } /** * Ensures that the currently active {@link ij.ImagePlus} matches the * currently active {@link ImageDisplay}. Does not perform any harmonization. */ public void syncActiveImage() { final ImageDisplay activeDisplay = imageDisplayService.getActiveImageDisplay(); ij1Helper.syncActiveImage(activeDisplay); } /** * Returns true if this LegacyService has been initialized already and false * if not. */ public boolean isInitialized() { return instance != null; } /** * States whether ImageJ1 and ImageJ2 data structures should be kept in sync. * <p> * While synchronization is supposed to be as cheap as possible, in practice * there are limitations with it currently which impact performance. So * such synchronization is off by default. The main consequence is that * it becomes harder to "mix and match" ImageJ1 and ImageJ2 APIs: you cannot * open an {@link ij.ImagePlus} and then reference it later from an ImageJ2 * {@link org.scijava.command.Command} as a {@link net.imagej.Dataset} unless * synchronization is enabled. */ public boolean isSyncEnabled() { final ImageJ2Options ij2Options = optionsService.getOptions(ImageJ2Options.class); return ij2Options == null ? false : ij2Options.isSyncEnabled(); } /** * States whether we're running in legacy ImageJ 1.x mode. * * To support work flows which are incompatible with ImageJ2, we want to allow * users to run in legacy ImageJ 1.x mode, where the ImageJ2 GUI is hidden and * the ImageJ 1.x GUI is shown. During this time, no synchronization should take * place. */ public boolean isLegacyMode() { return ij1Helper != null && ij1Helper.getIJ() != null; } /** Switches to/from running legacy ImageJ 1.x mode. 
*/ public void toggleLegacyMode(final boolean wantIJ1) { toggleLegacyMode(wantIJ1, false); } public synchronized void toggleLegacyMode(final boolean wantIJ1, final boolean initializing) { // TODO: hide/show Brightness/Contrast, Color Picker, Command Launcher, etc if (!initializing) { if (uiService() != null) { // hide/show the IJ2 main window final UserInterface ui = uiService.getDefaultUI(); if (ui != null && ui instanceof LegacyUI) { UserInterface modern = null; for (final UserInterface ui2 : uiService.getAvailableUIs()) { if (ui2 == ui) continue; modern = ui2; break; } if (modern == null) { log.error("No modern UI available"); return; } final ApplicationFrame frame = ui.getApplicationFrame(); ApplicationFrame modernFrame = modern.getApplicationFrame(); if (!wantIJ1 && modernFrame == null) { if (ij1Helper.isVisible()) modern.show(); modernFrame = modern.getApplicationFrame(); } if (frame == null || modernFrame == null) { log.error("Application frame missing: " + frame + " / " + modernFrame); return; } frame.setVisible(wantIJ1); modernFrame.setVisible(!wantIJ1); } else { final ApplicationFrame appFrame = ui == null ? null : ui.getApplicationFrame(); if (appFrame == null) { if (ui != null && !wantIJ1) uiService.showUI(); } else { appFrame.setVisible(!wantIJ1); } } } // TODO: move this into the LegacyImageMap's toggleLegacyMode, passing // the uiService // hide/show the IJ2 datasets corresponding to legacy ImagePlus instances for (final ImageDisplay display : getImageMap().getImageDisplays()) { final ImageDisplayViewer viewer = (ImageDisplayViewer) uiService.getDisplayViewer(display); if (viewer == null) continue; final DisplayWindow window = viewer.getWindow(); if (window != null) window.showDisplay(!wantIJ1); } } // hide/show IJ1 main window ij1Helper.setVisible(wantIJ1); getImageMap().toggleLegacyMode(wantIJ1); } public App getApp() { if (appService == null) return null; return appService.getApp(); } public void handleException(final Throwable e) { log.error(e); ij1Helper.handleException(e); } // -- Service methods -- @Override public void initialize() { checkInstance(); try { final ClassLoader loader = Thread.currentThread().getContextClassLoader(); final boolean ij1Initialized = LegacyEnvironment.isImageJ1Initialized(loader); if (!ij1Initialized) { getLegacyEnvironment(loader).newImageJ1(true); } ij1Helper = new IJ1Helper(this); } catch (final Throwable t) { throw new RuntimeException("Failed to instantiate IJ1.", t); } synchronized (LegacyService.class) { checkInstance(); instance = this; instantiationStackTrace = new Throwable("Initialized here:"); final ClassLoader loader = Thread.currentThread().getContextClassLoader(); LegacyInjector.installHooks(loader, new DefaultLegacyHooks(this, ij1Helper)); } ij1Helper.initialize(); ij1Helper.addAliases(scriptService); SwitchToModernMode.registerMenuItem(); // NB: We cannot call appService.getApp().getBaseDirectory(), because // that prevents the net.imagej.app.ToplevelImageJApp from getting its // LegacyService parameter injected properly. // So we get the app directory in a much more unsafe way... 
final File topLevel = AppUtils.getBaseDirectory("imagej.dir", getClass(), null); final File plugins = new File(topLevel, "plugins"); if (plugins.exists()) { final File scripts = new File(plugins, "Scripts"); if (scripts.exists()) scriptService.addScriptDirectory(scripts); scriptService.addScriptDirectory(plugins, new MenuPath("Plugins")); } ij1Helper.addMenuItems(); } // -- Disposable methods -- @Override public void dispose() { ij1Helper.dispose(); final ClassLoader loader = Thread.currentThread().getContextClassLoader(); LegacyInjector.installHooks(loader, null); synchronized (LegacyService.class) { instance = null; instantiationStackTrace = null; } // clean up SingleInstance remote objects SingleInstance.shutDown(); } // -- Versioned methods -- /** Gets the version of ImageJ 1.x being used. */ @Override public String getVersion() { return ij1Helper.getVersion(); } // -- Utility methods -- /** * Returns the legacy service associated with the ImageJ 1.x instance in the * current class loader. This method is invoked by the javassisted methods of * ImageJ 1.x. * * @return the legacy service */ public static LegacyService getInstance() { return instance; } // -- Event handlers -- /** * Keeps the active legacy {@link ij.ImagePlus} in sync with the active modern * {@link ImageDisplay}. * * @param event */ @EventHandler private void onEvent(final DisplayActivatedEvent event) { syncActiveImage(); } @EventHandler private void onEvent(final KyPressedEvent event) { final KeyCode code = event.getCode(); if (code == KeyCode.SPACE) ij1Helper.setKeyDown(KeyCode.SPACE.getCode()); if (code == KeyCode.ALT) ij1Helper.setKeyDown(KeyCode.ALT.getCode()); if (code == KeyCode.SHIFT) ij1Helper.setKeyDown(KeyCode.SHIFT.getCode()); if (code == KeyCode.CONTROL) ij1Helper .setKeyDown(KeyCode.CONTROL.getCode()); if (ij1Helper.isMacintosh() && code == KeyCode.META) { ij1Helper.setKeyDown(KeyCode.CONTROL.getCode()); } } @EventHandler private void onEvent(final KyReleasedEvent event) { final KeyCode code = event.getCode(); if (code == KeyCode.SPACE) ij1Helper.setKeyUp(KeyCode.SPACE.getCode()); if (code == KeyCode.ALT) ij1Helper.setKeyUp(KeyCode.ALT.getCode()); if (code == KeyCode.SHIFT) ij1Helper.setKeyUp(KeyCode.SHIFT.getCode()); if (code == KeyCode.CONTROL) ij1Helper.setKeyUp(KeyCode.CONTROL.getCode()); if (ij1Helper.isMacintosh() && code == KeyCode.META) { ij1Helper.setKeyUp(KeyCode.CONTROL.getCode()); } } // -- Internal methods -- /** * <strong>This is not part of the public API. DO NOT USE!</strong> * <p> * This method toggles a {@link ThreadLocal} flag as to whether or not legacy * UI components are in the process of handling {@code StatusEvents}. * </p> * * @return the old processing value */ public boolean setProcessingEvents(final boolean processing) { final boolean result = isProcessingEvents(); if (result != processing) { isProcessingEvents.set(processing); } return result; } /** * <strong>This is not part of the public API. DO NOT USE!</strong> * <p> * {@link ThreadLocal} check to see if components are in the middle of * processing events. * </p> * * @return True iff this thread is already processing events through the * {@code LegacyService}. */ public boolean isProcessingEvents() { final Boolean result = isProcessingEvents.get(); return result == Boolean.TRUE; } /** * <strong>This is not part of the public API. DO NOT USE!</strong> * <p> * Adds all legacy compatible commands to the ImageJ1 menus. The nested menu * structure of each command is preserved. 
* </p> */ public Map<String, ModuleInfo> getScriptsAndNonLegacyCommands() { final Map<String, ModuleInfo> modules = new LinkedHashMap<String, ModuleInfo>(); legacyCompatible.clear(); for (final CommandInfo info : commandService .getCommandsOfType(Command.class)) { if (info.getMenuPath().size() == 0 || info.is("no-legacy")) { continue; } else if (!info.getAnnotation().visible()) { continue; } final String key = info.getIdentifier(); legacyCompatible.put(key, info); modules.put(key, info); } for (final ScriptInfo info : scriptService.getScripts()) { if (info.getMenuPath().size() == 0) { continue; } final String path = info.getPath(); if (!new File(path).getName().contains("_")) continue; final String key = info.getIdentifier(); legacyCompatible.put(key, info); modules.put(key, info); } return modules; } /** <strong>This is not part of the public API. DO NOT USE!</strong> */ boolean handleShortcut(final String accelerator) { final Accelerator acc = Accelerator.create(accelerator); if (acc == null) return false; final ModuleInfo module = moduleService.getModuleForAccelerator(acc); if (module == null || module.is("no-legacy")) return false; moduleService.run(module, true); return true; } // -- Helper methods -- /** * @throws UnsupportedOperationException if the singleton * {@code LegacyService} already exists. */ private void checkInstance() { if (instance != null) { throw new UnsupportedOperationException( "Cannot instantiate more than one LegacyService", instantiationStackTrace); } } private static LegacyEnvironment getLegacyEnvironment(final ClassLoader loader) throws ClassNotFoundException { final boolean headless = GraphicsEnvironment.isHeadless(); final LegacyEnvironment ij1 = new LegacyEnvironment(loader, headless); ij1.disableInitializer(); ij1.noPluginClassLoader(); ij1.suppressIJ1ScriptDiscovery(); ij1.applyPatches(); return ij1; } // -- Deprecated methods -- /** * Makes sure that the ImageJ 1.x classes are patched. * <p> * We absolutely require that the LegacyInjector did its job before we use the * ImageJ 1.x classes. * </p> * <p> * Just loading the {@link LegacyService} class is not enough; it will * not necessarily get initialized. So we provide this method just to force * class initialization (and thereby the LegacyInjector to patch ImageJ 1.x). * </p> * * @deprecated use {@link LegacyInjector#preinit()} instead */ @Deprecated public static void preinit() { try { getLegacyEnvironment(Thread.currentThread().getContextClassLoader()); } catch (final Throwable t) { t.printStackTrace(); } } }
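// runLegacyCommand() above routes an ImageJ 1.x plugin invocation through CommandService via
// LegacyCommand. A minimal usage sketch, assuming a SciJava Context in which this LegacyService
// is available; the plugin class and argument echo the Javadoc example and are illustrative only.
import net.imagej.legacy.LegacyService;
import org.scijava.Context;

final class RunLegacyCommandSketch {
    public static void main(String[] args) {
        Context context = new Context();
        LegacyService legacy = context.getService(LegacyService.class);
        if (legacy != null) {
            // Roughly equivalent to IJ1's IJ.runPlugIn("ij.plugin.Clipboard", "copy"),
            // but dispatched as a modern command so the UI/event seams stay in place.
            legacy.runLegacyCommand("ij.plugin.Clipboard", "copy");
        }
        context.dispose();
    }
}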
package opencsp.csta.types;

import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;

public class MonitorPoint {
    private CrossReferenceId crossReferenceId;
    private Device monitoredDevice;

    public MonitorPoint(CrossReferenceId crossReferenceId, Device monitoredDevice) {
        this.crossReferenceId = crossReferenceId;
        this.monitoredDevice = monitoredDevice;
    }
}
package org.broad.igv.track; import htsjdk.tribble.AsciiFeatureCodec; import htsjdk.tribble.Feature; import htsjdk.variant.vcf.VCFHeader; import org.apache.log4j.Logger; import org.broad.igv.bbfile.BBFileReader; import org.broad.igv.bigwig.BigWigDataSource; import org.broad.igv.blast.BlastMapping; import org.broad.igv.blast.BlastParser; import org.broad.igv.data.*; import org.broad.igv.data.cufflinks.*; import org.broad.igv.data.expression.ExpressionDataset; import org.broad.igv.data.expression.ExpressionFileParser; import org.broad.igv.data.seg.*; import org.broad.igv.exceptions.DataLoadException; import org.broad.igv.feature.*; import org.broad.igv.feature.basepair.BasePairTrack; import org.broad.igv.bedpe.BedPEParser; import org.broad.igv.bedpe.InteractionTrack; import org.broad.igv.feature.bionano.SMAPParser; import org.broad.igv.feature.bionano.SMAPRenderer; import org.broad.igv.feature.cyto.CytobandTrack; import org.broad.igv.feature.dranger.DRangerParser; import org.broad.igv.feature.dsi.DSIRenderer; import org.broad.igv.feature.dsi.DSITrack; import org.broad.igv.feature.genome.load.GenbankParser; import org.broad.igv.feature.genome.Genome; import org.broad.igv.feature.genome.GenomeManager; import org.broad.igv.feature.gff.GFFFeatureSource; import org.broad.igv.feature.sprite.ClusterParser; import org.broad.igv.feature.sprite.ClusterTrack; import org.broad.igv.feature.tribble.CodecFactory; import org.broad.igv.feature.tribble.FeatureFileHeader; import org.broad.igv.feature.tribble.GFFCodec; import org.broad.igv.feature.tribble.TribbleIndexNotFoundException; import org.broad.igv.goby.GobyAlignmentQueryReader; import org.broad.igv.goby.GobyCountArchiveDataSource; import org.broad.igv.google.GoogleUtils; import org.broad.igv.gwas.*; import org.broad.igv.htsget.HtsgetUtils; import org.broad.igv.htsget.HtsgetVariantSource; import org.broad.igv.lists.GeneList; import org.broad.igv.lists.GeneListManager; import org.broad.igv.maf.MultipleAlignmentTrack; import org.broad.igv.methyl.MethylTrack; import org.broad.igv.prefs.PreferencesManager; import org.broad.igv.renderer.HeatmapRenderer; import org.broad.igv.renderer.MutationRenderer; import org.broad.igv.renderer.PointsRenderer; import org.broad.igv.sam.*; import org.broad.igv.sam.reader.IndexNotFoundException; import org.broad.igv.tdf.TDFDataSource; import org.broad.igv.tdf.TDFReader; import org.broad.igv.ui.IGV; import org.broad.igv.ui.util.ConfirmDialog; import org.broad.igv.ui.util.ConvertFileDialog; import org.broad.igv.ui.util.ConvertOptions; import org.broad.igv.ui.util.MessageUtils; import org.broad.igv.util.*; import org.broad.igv.variant.VariantTrack; import org.broad.igv.variant.util.PedigreeUtils; import java.io.BufferedReader; import java.io.IOException; import java.io.StringReader; import java.util.*; import static org.broad.igv.prefs.Constants.*; public class TrackLoader { private static Logger log = Logger.getLogger(TrackLoader.class); private static Collection<? extends Class> NOLogExceptions = Arrays.asList(TribbleIndexNotFoundException.class); /** * Switches on various attributes of locator (mainly locator path extension and whether the locator is indexed) * to call the appropriate loading method. * * @param locator * @param genome * @return */ public List<Track> load(ResourceLocator locator, Genome genome) throws DataLoadException { final String path = locator.getPath().trim(); // Check if the AWS credentials are still valid. 
If not, re-login and renew pre-signed urls if (AmazonUtils.isAwsS3Path(path)) { AmazonUtils.checkLogin(); } log.info("Loading resource, path " + path); try { String format = locator.getFormat(); if (format.equals("tbi")) { MessageUtils.showMessage("<html><b>Error:</b>File type '.tbi' is not recognized. If this is a 'tabix' index <br>" + " load the associated gzipped file, which should have an extension of '.gz'"); } //This list will hold all new tracks created for this locator List<Track> newTracks = new ArrayList<Track>(); if (locator.isHtsget()) { tryHtsget(locator, newTracks, genome); } else if (format.equals("gmt")) { loadGMT(locator); } else if (format.equals("vcf.list")) { loadVCFListFile(locator, newTracks, genome); } else if (format.equals("trio")) { loadTrioData(locator); } else if (format.equals("gct") || format.equals("res") || format.equals("tab")) { loadGctFile(locator, newTracks, genome); } else if (format.equals("gbk") || format.equals("gb")) { loadGbkFile(locator, newTracks, genome); } else if (format.equals("cn") || format.equals("xcn") || format.equals("snp") || format.equals("igv") || format.equals("loh")) { loadIGVFile(locator, newTracks, genome); } else if (format.equals("cbs") || format.equals("seg") || format.equals("glad") || format.equals("birdseye_canary_calls") || format.equals("seg.zip")) { loadSegFile(locator, newTracks, genome); } else if (format.equals("gistic")) { loadGisticFile(locator, newTracks); } else if (format.contains(".tabblastn") || format.equals("orthologs")) { loadBlastMapping(locator, newTracks); } else if (isAlignmentTrack(format) || (path.startsWith("http") && path.contains("/query.cgi?"))) { loadAlignmentsTrack(locator, newTracks, genome); } else if (format.equals("shape") || format.equals("map")) { convertLoadShapeFile(locator, newTracks, genome); } else if (format.equals("wig") || format.equals("bedgraph") || format.equals("bdg") || format.equals("cpg") || format.equals("expr")) { loadWigFile(locator, newTracks, genome); } else if (format.equals("fpkm_tracking") || format.equals("gene_exp.diff") || format.equals("cds_exp.diff")) { loadCufflinksFile(locator, newTracks, genome); } else if (format.contains(".dranger")) { loadDRangerFile(locator, newTracks, genome); } else if (format.equals("ewig.tdf")) { loadEwigIBFFile(locator, newTracks, genome); } else if (format.equals("bw") || format.equals("bb") || format.equals("bigwig") || format.equals("bigbed")) { loadBWFile(locator, newTracks, genome); } else if (format.equals("ibf") || format.equals("tdf")) { loadTDFFile(locator, newTracks, genome); } else if (format.equals("counts")) { loadGobyCountsArchive(locator, newTracks, genome); } else if (WiggleParser.isWiggle(locator)) { loadWigFile(locator, newTracks, genome); } else if (format.equals("maf.dict")) { loadMultipleAlignmentTrack(locator, newTracks, genome); } else if (format.equals("db") || format.equals("dbn")) { convertLoadStructureFile(locator, newTracks, genome, "dotBracket"); } else if (format.equals("ct")) { convertLoadStructureFile(locator, newTracks, genome, "connectTable"); } else if (format.equals("dp")) { convertLoadStructureFile(locator, newTracks, genome, "pairingProb"); } else if (format.equals("bp")) { loadBasePairFile(locator, newTracks, genome); } else if (GWASParser.isGWASFile(format)) { loadGWASFile(locator, newTracks, genome); } else if (GobyAlignmentQueryReader.supportsFileType(path)) { loadAlignmentsTrack(locator, newTracks, genome); } else if (format.equals("list")) { // This should be deprecated loadListFile(locator, 
newTracks, genome); } else if (format.equals("smap")) { loadSMAPFile(locator, newTracks, genome); } else if (format.equals("dsi")) { loadDSIFile(locator, newTracks, genome); } else if (format.equals("bedpe")) { loadBedPEFile(locator, newTracks, genome); } else if (format.equals("clusters")) { loadClusterFile(locator, newTracks, genome); } else if (CodecFactory.hasCodec(locator, genome) && !forceNotTribble(format)) { loadTribbleFile(locator, newTracks, genome); } else if (MutationTrackLoader.isMutationAnnotationFile(locator)) { loadMutFile(locator, newTracks, genome); // Must be tried before ".maf" test below } else if (format.equals("maf")) { loadMultipleAlignmentTrack(locator, newTracks, genome); } else { //if a url, try htsget boolean isHtsget = tryHtsget(locator, newTracks, genome); if (!isHtsget) { // If the file is too large, give up // TODO -- ftp test final int tenMB = 10000000; long fileLength = ParsingUtils.getContentLength(locator.getPath()); if (fileLength > tenMB) { MessageUtils.confirm("<html>Cannot determine file type of: " + locator.getPath()); } // Read file contents and try to sort it out String contents = FileUtils.getContents(locator.getPath()); BufferedReader reader = new BufferedReader(new StringReader(contents)); if (CytoBandFileParser.isValid(reader, locator.getPath())) { Track track = new CytobandTrack(locator, new BufferedReader(new StringReader(contents)), genome); newTracks.add(track); } else if (AttributeManager.isSampleInfoFile(reader)) { // This might be a sample information file. AttributeManager.getInstance().loadSampleInfo(locator); } else { MessageUtils.showMessage("<html>Unknown file type: " + path + "<br>Check file extension"); } } } // Track line if (newTracks.size() > 0) { TrackProperties tp = null; String trackLine = locator.getTrackLine(); if (trackLine != null) { tp = new TrackProperties(); ParsingUtils.parseTrackLine(trackLine, tp); } for (Track track : newTracks) { if (locator.getFeatureInfoURL() != null) { track.setUrl(locator.getFeatureInfoURL()); } if (tp != null) { track.setProperties(tp); } if (locator.getColor() != null) { track.setColor(locator.getColor()); } if (locator.getSampleId() != null) { track.setSampleId(locator.getSampleId()); } } } return newTracks; } catch (Exception e) { if (!NOLogExceptions.contains(e.getClass())) { log.error(e.getMessage(), e); } throw new DataLoadException(e.getMessage()); } } private boolean tryHtsget(ResourceLocator locator, List<Track> newTracks, Genome genome) { boolean isHtsget = false; if (locator.getPath().startsWith("https://") || locator.getPath().startsWith("http://") || locator.getPath().startsWith("htsget://")) { try { HtsgetUtils.Metadata htsgetMeta = HtsgetUtils.getMetadata(locator.getPath()); if (htsgetMeta != null) { isHtsget = true; locator.setFormat(htsgetMeta.getFormat().toLowerCase()); if (htsgetMeta.getFormat().equals("VCF")) { locator.setHtsget(true); HtsgetVariantSource source = new HtsgetVariantSource(htsgetMeta, genome); loadVCFWithSource(locator, source, newTracks); } else if (htsgetMeta.getFormat().equals("BAM") || htsgetMeta.getFormat().equals("CRAM")) { locator.setHtsget(true); loadAlignmentsTrack(locator, newTracks, genome); } else { throw new RuntimeException("Format: '" + htsgetMeta.getFormat() + "' is not supported for htsget servers."); } } } catch (IOException e) { // Not necessarily an error, might just indicate it's not an htsget server.
Not sure // if this should be logged or not, it will be a common and expected occurence when loading // sample information, which is checked after htsget return false; } } return isHtsget; } public static boolean isAlignmentTrack(String typeString) { return typeString.equals("sam") || typeString.equals("bam") || typeString.equals("cram") || typeString.equals("sam.list") || typeString.equals("bam.list") || typeString.equals("aligned") || typeString.equals("sai") || typeString.equals("bai") || typeString.equals("csi") || typeString.equals("alist"); } private void loadSMAPFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { List<Feature> features = SMAPParser.parseFeatures(locator, genome); FeatureCollectionSource src = new FeatureCollectionSource(features, genome); FeatureTrack track = new FeatureTrack(locator, locator.getName(), src); track.setRendererClass(SMAPRenderer.class); track.setDisplayMode(Track.DisplayMode.EXPANDED); newTracks.add(track); } private boolean forceNotTribble(String typeString) { List<String> nonTribble = Arrays.asList("fpkm_tracking", "exp_diff", "_exp.diff"); for (String s : nonTribble) { if (typeString.endsWith(s)) { return true; } } return false; } private void loadGMT(ResourceLocator locator) throws IOException { List<GeneList> lists = GeneListManager.getInstance().loadGMTFile(locator.getPath()); if (lists.size() == 1) { GeneList gl = lists.get(0); IGV.getInstance().setGeneList(gl, true); } else { MessageUtils.showMessage("Loaded " + lists.size() + " gene lists."); } } private void loadVCF(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { TribbleFeatureSource src = TribbleFeatureSource.getFeatureSource(locator, genome); loadVCFWithSource(locator, src, newTracks); } private void loadVCFWithSource(ResourceLocator locator, FeatureSource src, List<Track> newTracks) { VCFHeader header = (VCFHeader) src.getHeader(); // Test if the input VCF file contains methylation rate data: // This is determined by testing for the presence of two sample format fields: MR and GB, used in the // rendering of methylation rate. // MR is the methylation rate on a scale of 0 to 100% and GB is the number of bases that pass // filter for the position. GB is needed to avoid displaying positions for which limited coverage // prevents reliable estimation of methylation rate. boolean enableMethylationRateSupport = (header.getFormatHeaderLine("MR") != null && header.getFormatHeaderLine("GB") != null); List<String> allSamples = new ArrayList(header.getGenotypeSamples()); VariantTrack t = new VariantTrack(locator, src, allSamples, enableMethylationRateSupport); // VCF tracks handle their own margin t.setMargin(0); newTracks.add(t); } private void loadVCFListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { TribbleListFeatureSource src = new TribbleListFeatureSource(locator.getPath(), genome); VCFHeader header = (VCFHeader) src.getHeader(); // Test if the input VCF file contains methylation rate data: // This is determined by testing for the presence of two sample format fields: MR and GB, used in the // rendering of methylation rate. // MR is the methylation rate on a scale of 0 to 100% and GB is the number of bases that pass // filter for the position. GB is needed to avoid displaying positions for which limited coverage // prevents reliable estimation of methylation rate. 
boolean enableMethylationRateSupport = (header.getFormatHeaderLine("MR") != null && header.getFormatHeaderLine("GB") != null); List<String> allSamples = new ArrayList(header.getGenotypeSamples()); VariantTrack t = new VariantTrack(locator, src, allSamples, enableMethylationRateSupport); // VCF tracks handle their own margin t.setMargin(0); newTracks.add(t); } private void loadBlastMapping(ResourceLocator locator, List<Track> newTracks) { List<BlastMapping> mappings = (new BlastParser()).parse(locator.getPath()); List<htsjdk.tribble.Feature> features = new ArrayList<htsjdk.tribble.Feature>(mappings.size()); features.addAll(mappings); Genome genome = GenomeManager.getInstance().getCurrentGenome(); FeatureTrack track = new FeatureTrack(locator, new FeatureCollectionSource(features, genome)); track.setName(locator.getTrackName()); // track.setRendererClass(AlignmentBlockRenderer.class); newTracks.add(track); } private void loadDRangerFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { DRangerParser parser = new DRangerParser(); newTracks.addAll(parser.loadTracks(locator, genome)); } private void loadBedPEFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { BedPEParser.Dataset features = BedPEParser.parse(locator, genome); newTracks.add(new InteractionTrack(locator, features, genome)); } private void loadClusterFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { ClusterParser.ClusterSet features = ClusterParser.parse(locator.getPath()); newTracks.add(new ClusterTrack(locator, features, genome)); } /** * Load the input file as a feature, mutation, or maf (multiple alignment) file. * * @param locator * @param newTracks */ private void loadTribbleFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { String format = locator.getFormat(); // Mutation (mut, maf, vcf) files are handled special. Check here, rather than depend on order in giant case statement. 
if (MutationTrackLoader.isMutationAnnotationFile(locator)) { loadMutFile(locator, newTracks, genome); // Must be tried before generic "loadIndexed" below } else if (VariantTrack.isVCF(format)) { loadVCF(locator, newTracks, genome); } else { FeatureSource src; if (locator.isDataURL()) { // Simulate a tribble source DataURLParser parser = new DataURLParser(); parser.parseFeatures(locator.getPath(), format, genome); src = new FeatureCollectionSource(parser.getFeatures(), genome); TrackProperties tp = parser.getTrackProperties(); if (tp != null) { ((FeatureCollectionSource) src).setHeader(tp); } } else { TribbleFeatureSource tribbleFeatureSource = TribbleFeatureSource.getFeatureSource(locator, genome); if (GFFFeatureSource.isGFF(locator.getPath())) { GFFCodec codec = (GFFCodec) CodecFactory.getCodec(locator, genome); src = new GFFFeatureSource(tribbleFeatureSource, codec.getVersion()); } else { src = tribbleFeatureSource; } } // Create feature source and track FeatureTrack t = new FeatureTrack(locator, src); //t.setRendererClass(BasicTribbleRenderer.class); // Set track properties from header Object header = src.getHeader(); if (header != null && header instanceof FeatureFileHeader) { FeatureFileHeader ffh = (FeatureFileHeader) header; if (ffh.getTrackType() != null) { t.setTrackType(ffh.getTrackType()); } if (ffh.getTrackProperties() != null) { TrackProperties tp = ffh.getTrackProperties(); t.setProperties(tp); t.setTrackLine(tp.getTrackLine()); } if (ffh.getTrackType() == TrackType.REPMASK) { t.setHeight(15); } } String path = locator.getPath().toLowerCase(); if (path.contains(".narrowpeak") || locator.getPath().contains(".broadpeak") || locator.getPath().contains(".gappedpeak") || locator.getPath().contains(".regionpeak")) { t.setUseScore(true); } newTracks.add(t); } } private void loadDSIFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { TribbleFeatureSource tribbleFeatureSource = TribbleFeatureSource.getFeatureSource(locator, genome); // Create feature source and track DSITrack t = new DSITrack(locator, tribbleFeatureSource); t.setName(locator.getTrackName()); //t.setRendererClass(BasicTribbleRenderer.class); // Set track properties from header Object header = tribbleFeatureSource.getHeader(); if (header != null && header instanceof TrackProperties) { TrackProperties tp = (TrackProperties) header; t.setProperties(tp); t.setTrackLine(tp.getTrackLine()); } t.setRendererClass(DSIRenderer.class); newTracks.add(t); } /** * Load GWAS PLINK result file * * @param locator * @param newTracks * @throws IOException */ private void loadGWASFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { GWASParser gwasParser = new GWASParser(locator, genome); Map<String, List<GWASFeature>> gwasData = gwasParser.parse(); GWASTrack gwasTrack = new GWASTrack(locator, locator.getPath(), locator.getFileName(), gwasData, gwasParser.getColumnHeaders(), genome); newTracks.add(gwasTrack); } private void loadGctFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { if (locator.isLocal()) { if (!checkSize(locator)) { return; } } ExpressionFileParser parser = null; ExpressionDataset ds = null; parser = new ExpressionFileParser(locator, null, genome); ds = parser.createDataset(); if (ds.isEmpty()) { String message = "The probes in the file <br>&nbsp;&nbsp;&nbsp;" + locator.getPath() + "<br>" + "could not be mapped to genomic positions. 
This can be corrected by specify a probe mapping<br>" + "file from the Preferences window (Probes tab), or by specifing the genomic positions in the<br>" + "expression data file. Please see the user guide for more details."; MessageUtils.showMessage(message); } else { ds.setName(locator.getTrackName()); ds.setNormalized(true); ds.setLogValues(true); /* * File outputFile = new File(IGV.DEFAULT_USER_DIRECTORY, file.getName() + ".h5"); * OverlappingProcessor proc = new OverlappingProcessor(ds); * proc.setZoomMax(0); * proc.process(outputFile.getAbsolutePath()); * loadH5File(outputFile, messages, attributeList, group); */ // Counter for generating ID TrackProperties trackProperties = ds.getTrackProperties(); String path = locator.getPath(); for (String trackName : ds.getTrackNames()) { DatasetDataSource dataSource = new DatasetDataSource(trackName, ds, genome); String trackId = path + "_" + trackName; Track track = new DataSourceTrack(locator, trackId, trackName, dataSource); track.setRendererClass(HeatmapRenderer.class); track.setProperties(trackProperties); newTracks.add(track); } } } /** * Load features from a genbank (.gbk)file. This method ignores the fasta section. To define a genome from * a genbank file use GenomeManager. * * @param newTracks * @param genome * @throws IOException */ private void loadGbkFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { GenbankParser genbankParser = new GenbankParser(locator.getPath()); genbankParser.readFeatures(false); FeatureCollectionSource src = new FeatureCollectionSource(genbankParser.getFeatures(), genome); FeatureTrack track = new FeatureTrack(locator, src); newTracks.add(track); } private void loadIGVFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { if (locator.isLocal()) { if (!checkSize(locator)) { return; } } String dsName = locator.getTrackName(); IGVDataset ds = new IGVDataset(locator, genome); ds.setName(dsName); TrackProperties trackProperties = ds.getTrackProperties(); String path = locator.getPath(); TrackType type = ds.getType(); for (String trackName : ds.getTrackNames()) { DatasetDataSource dataSource = new DatasetDataSource(trackName, ds, genome); String trackId = path + "_" + trackName; DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); // track.setRendererClass(HeatmapRenderer.class); track.setTrackType(ds.getType()); track.setProperties(trackProperties); if (type == TrackType.ALLELE_FREQUENCY) { track.setRendererClass(PointsRenderer.class); track.setHeight(40); } newTracks.add(track); } } private void loadCufflinksFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { final String path = locator.getPath(); final String format = locator.getFormat(); List<DataTrack> cuffTracks = new ArrayList<DataTrack>(); if (format.equals("fpkm_tracking")) { FPKMTrackingCodec codec = new FPKMTrackingCodec(path); List<FPKMValue> values = CufflinksParser.parse(codec, path); for (int sampleIndex = 0; sampleIndex < codec.getNumSamples(); sampleIndex++) { CufflinksDataSource ds = new CufflinksDataSource(sampleIndex, values, genome); String supId = String.format("q%02d", sampleIndex); DataTrack track = new DataSourceTrack(locator, locator.getPath() + " " + supId, locator.getTrackName() + " " + supId, ds); cuffTracks.add(track); } } else if (format.equals("gene_exp.diff") || format.equals("cds_exp.diff")) { AsciiFeatureCodec<ExpDiffValue> codec = new ExpDiffCodec(path); List<ExpDiffValue> values = CufflinksParser.parse(codec, 
path); CufflinksDataSource ds = new CufflinksDataSource(values, genome); DataTrack track = new DataSourceTrack(locator, locator.getPath(), locator.getTrackName(), ds); cuffTracks.add(track); } else { throw new RuntimeException("Unsupported file type: " + path); } for (DataTrack track : cuffTracks) { track.setTrackType(TrackType.FPKM); CufflinksTrack.setCufflinksScale(track); newTracks.add(track); } } private static boolean checkSize(ResourceLocator locator) { if (!PreferencesManager.getPreferences().getAsBoolean(SHOW_SIZE_WARNING)) { return true; } final String path = locator.getPath(); long size = FileUtils.getLength(path); int maxSize = 200000000; // 200 mb if (path.endsWith(".gz") || path.endsWith(".bgz")) { maxSize /= 4; } if (size > maxSize) { String message = "The file " + path + " is large (" + (size / 1000000) + " mb). It is recommended " + "that large files be converted to the binary <i>.tdf</i> format using the IGVTools " + "<b>toTDF</b> command. Loading unconverted ascii fies of this size can lead to poor " + "performance or unresponsiveness (freezing). " + "<br><br>IGVTools can be launched from the <b>Tools</b> menu or separately as a " + "command line program. See the user guide for more details.<br><br>Click <b>Continue</b> " + "to continue loading, or <b>Cancel</b> to skip this file."; return ConfirmDialog.optionallyShowConfirmDialog(message, SHOW_SIZE_WARNING, true); } return true; } private void loadDOTFile(ResourceLocator locator, List<Track> newTracks) { //GraphTrack gt = new GraphTrack(locator); //gt.setHeight(80); //newTracks.add(gt); } private void loadWigFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { if (locator.isLocal()) { if (!checkSize(locator)) { return; } } WiggleDataset ds = (new WiggleParser(locator, genome)).parse(); TrackProperties props = ds.getTrackProperties(); // In case of conflict between the resource locator display name and the track properties name, // use the resource locator String name = props == null ? null : props.getName(); String label = locator.getName(); if (name == null) { name = locator.getFileName(); } else if (label != null) { props.setName(label); // erase name rom track properties } String path = locator.getPath(); boolean multiTrack = ds.getTrackNames().length > 1; for (String heading : ds.getTrackNames()) { String trackId = multiTrack ? path + "_" + heading : path; String trackName = multiTrack ? heading : name; DatasetDataSource dataSource = new DatasetDataSource(trackId, ds, genome); DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); String displayName = (label == null || multiTrack) ? heading : label; track.setName(displayName); track.setProperties(props); track.setTrackType(ds.getType()); if (ds.getType() == TrackType.EXPR) { track.setWindowFunction(WindowFunction.none); } newTracks.add(track); } } public void loadTDFFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { log.debug("Loading TDF file " + locator.getPath()); TDFReader reader = TDFReader.getReader(locator); TrackType type = reader.getTrackType(); TrackProperties props = null; String trackLine = reader.getTrackLine(); if (trackLine != null && trackLine.length() > 0) { props = new TrackProperties(); ParsingUtils.parseTrackLine(trackLine, props); } String name = locator.getName(); if (name == null) { name = props == null ? 
locator.getTrackName() : props.getName(); } int trackNumber = 0; String path = locator.getPath(); boolean multiTrack = reader.getTrackNames().length > 1; for (String heading : reader.getTrackNames()) { String trackId = multiTrack ? path + "_" + heading : path; String trackName = multiTrack ? heading : name; final DataSource dataSource = locator.getPath().endsWith(".counts") ? new GobyCountArchiveDataSource(locator) : new TDFDataSource(reader, trackNumber, heading, genome); DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); String displayName = (name == null || multiTrack) ? heading : name; track.setName(displayName); track.setTrackType(type); if (props != null) { track.setProperties(props); } newTracks.add(track); trackNumber++; } } public void loadBWFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { String trackName = locator.getTrackName(); String trackId = locator.getPath(); String path = locator.getPath(); BBFileReader reader = new BBFileReader(path); BigWigDataSource bigwigSource = new BigWigDataSource(reader, genome); if (reader.isBigWigFile()) { DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, bigwigSource); newTracks.add(track); } else if (reader.isBigBedFile()) { if (locator.getPath().contains("RRBS_cpgMethylation") || locator.getPath().contains("BiSeq_cpgMethylation") || (reader.getAutoSql() != null && reader.getAutoSql().startsWith("table BisulfiteSeq"))) { loadMethylTrack(locator, reader, newTracks, genome); } else { FeatureTrack track = new FeatureTrack(locator, trackId, trackName, bigwigSource); newTracks.add(track); } } else { throw new RuntimeException("Unknown BIGWIG type: " + locator.getPath()); } } private void loadMethylTrack(ResourceLocator locator, BBFileReader reader, List<Track> newTracks, Genome genome) throws IOException { MethylTrack track = new MethylTrack(locator, reader, genome); newTracks.add(track); } private void loadGobyCountsArchive(ResourceLocator locator, List<Track> newTracks, Genome genome) { if (log.isDebugEnabled()) { log.debug("Loading Goby counts archive: " + locator.toString()); } String trackId = locator.getSampleId() + " coverage"; String trackName = locator.getFileName(); final DataSource dataSource = new GobyCountArchiveDataSource(locator); DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); newTracks.add(track); } private void loadEwigIBFFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { TDFReader reader = TDFReader.getReader(locator.getPath()); TrackProperties props = null; String trackLine = reader.getTrackLine(); if (trackLine != null && trackLine.length() > 0) { props = new TrackProperties(); ParsingUtils.parseTrackLine(trackLine, props); } EWigTrack track = new EWigTrack(locator, genome); if (props != null) { track.setProperties(props); } track.setName(locator.getTrackName()); newTracks.add(track); } private void loadListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { try { FeatureSource source = new FeatureDirSource(locator, genome); FeatureTrack track = new FeatureTrack(locator, source); track.setName(locator.getTrackName()); track.setVisibilityWindow(0); newTracks.add(track); } catch (IOException ex) { throw new RuntimeException(ex); } } private void loadGisticFile(ResourceLocator locator, List<Track> newTracks) { GisticTrack track = GisticFileParser.loadData(locator); track.setName(locator.getTrackName()); newTracks.add(track); } private void 
loadMultipleAlignmentTrack(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { MultipleAlignmentTrack t = new MultipleAlignmentTrack(locator, genome); t.setName("Multiple Alignments"); newTracks.add(t); } private void loadAlignmentsTrack(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { try { String dsName = locator.getTrackName(); // If the user tried to load the index, look for the file (this is a common mistake) final String format = locator.getFormat(); if (format.equals("sai") || format.equals("bai") || format.equals("csi")) { MessageUtils.showMessage("<html><b>ERROR:</b> Loading SAM/BAM index files are not supported: " + locator.getPath() + "<br>Load the SAM or BAM file directly. "); return; } AlignmentDataManager dataManager = new AlignmentDataManager(locator, genome); // Check that alignments we loaded actually match some data. Many BAM files will contain some sequences // not represented in the genome, buf if there are no matches warn the user. List<String> seqNames = dataManager.getSequenceNames(); if (seqNames != null && seqNames.size() > 0) { if (!dataManager.hasMatchingSequences()) { showMismatchSequenceNameMessage(locator.getPath(), genome, seqNames); } } if (format.equals("bam") || format.equals("cram")) { if (!dataManager.hasIndex()) { MessageUtils.showMessage("<html>Could not load index file for: " + locator.getPath() + "<br> An index file is required for SAM & BAM files."); return; } } AlignmentTrack alignmentTrack = new AlignmentTrack(locator, dataManager, genome); // parser.loadTrack(locator, dsName) alignmentTrack.setName(dsName); alignmentTrack.setVisible(PreferencesManager.getPreferences().getAsBoolean(SAM_SHOW_ALIGNMENT_TRACK)); // Create coverage track CoverageTrack covTrack = new CoverageTrack(locator, dsName + " Coverage", alignmentTrack, genome); newTracks.add(covTrack); covTrack.setDataManager(dataManager); dataManager.setCoverageTrack(covTrack); alignmentTrack.setCoverageTrack(covTrack); // Precalculated coverage data (can be null) String covPath = locator.getCoverage(); // Search for precalculated coverage data by naming convention. 
Bypass for certain cloud resources if (covPath == null || covPath.equals(".")) { String path = locator.getPath(); boolean bypassFileAutoDiscovery = PreferencesManager.getPreferences().getAsBoolean(BYPASS_FILE_AUTO_DISCOVERY) || GoogleUtils.isGoogleCloud(locator.getPath()) || path.contains("dropbox.com") || path.contains("dataformat=.bam") || path.contains("/query.cgi?"); if (!bypassFileAutoDiscovery) { covPath = path + ".tdf"; } } if (covPath != null && !covPath.equals(".")) { if (FileUtils.resourceExists(covPath)) { log.debug("Loading TDF for coverage: " + covPath); try { TDFReader reader = TDFReader.getReader(covPath); TDFDataSource ds = new TDFDataSource(reader, 0, dsName + " coverage", genome); covTrack.setDataSource(ds); } catch (Exception e) { log.error("Error loading coverage TDF file", e); } } } SpliceJunctionTrack spliceJunctionTrack = new SpliceJunctionTrack(locator, dsName + " Junctions", dataManager, alignmentTrack, SpliceJunctionTrack.StrandOption.BOTH); spliceJunctionTrack.setHeight(60); newTracks.add(spliceJunctionTrack); alignmentTrack.setSpliceJunctionTrack(spliceJunctionTrack); alignmentTrack.init(); newTracks.add(alignmentTrack); log.debug("Alignment track loaded"); } catch (IndexNotFoundException e) { MessageUtils.showMessage("<html>Could not find the index file for <br><br>&nbsp;&nbsp;" + e.getSamFile() + "<br><br>Note: The index file can be created using igvtools and must be in the same directory as the .sam file."); } } private void showMismatchSequenceNameMessage(String filename, Genome genome, List<String> seqNames) { StringBuffer message = new StringBuffer(); message.append("<html>File: " + filename + "<br>does not contain any sequence names which match the current genome."); message.append("<br><br>File: &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;"); int n = 0; for (String sn : seqNames) { message.append(sn + ", "); n++; if (n > 3) { message.append(" ..."); break; } } message.append("<br>Genome: "); n = 0; for (String cn : genome.getAllChromosomeNames()) { message.append(cn + ", "); n++; if (n > 3) { message.append(" ..."); break; } } MessageUtils.showMessage(message.toString()); } /** * Load a mutation file (".mut" or ".maf"). * * @param locator * @param newTracks */ private void loadMutFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { MutationTrackLoader loader = new MutationTrackLoader(); List<FeatureTrack> mutationTracks = loader.loadMutationTracks(locator, genome); for (FeatureTrack track : mutationTracks) { track.setTrackType(TrackType.MUTATION); track.setRendererClass(MutationRenderer.class); newTracks.add(track); } } private void loadSegFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { // TODO - -handle remote resource SegmentedDataSet ds; String path = locator.getPath().toLowerCase(); if (path.endsWith("seg.zip")) { ds = new SegmentedBinaryDataSet(locator); } else { SegmentFileParser parser = new SegmentFileParser(locator); ds = parser.loadSegments(locator, genome); } loadSegTrack(locator, newTracks, genome, ds); } /** * Add the provided SegmentedDataSet to the list of tracks, * set other relevant properties * * @param locator * @param newTracks * @param genome * @param ds */ private void loadSegTrack(ResourceLocator locator, List<Track> newTracks, Genome genome, SegmentedDataSet ds) { String path = locator.getPath(); TrackProperties props = null; if (ds instanceof SegmentedAsciiDataSet) { props = ((SegmentedAsciiDataSet) ds).getTrackProperties(); } // The "freq" track. 
TODO - make this optional if ((ds.getType() == TrackType.COPY_NUMBER || ds.getType() == TrackType.CNV) && ds.getSampleNames().size() > 1) { FreqData fd = new FreqData(ds, genome); String freqTrackId = path; String freqTrackName = "CNV Summary"; CNFreqTrack freqTrack = new CNFreqTrack(locator, freqTrackId, freqTrackName, fd); if (props != null) { freqTrack.setProperties(props); } newTracks.add(freqTrack); } for (String trackName : ds.getSampleNames()) { String trackId = path + "_" + trackName; SegmentedDataSource dataSource = new SegmentedDataSource(trackName, ds); DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); track.setRendererClass(HeatmapRenderer.class); track.setTrackType(ds.getType()); if (props != null) { track.setProperties(props); } newTracks.add(track); } } private void loadTrioData(ResourceLocator locator) throws IOException { PedigreeUtils.parseTrioFile(locator.getPath()); } /** * Convert an RNA chemical reactivity file (.shape, .map) into a .wig file * and load. */ private void convertLoadShapeFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { String inPath = locator.getPath(); String fileName = locator.getFileName(); String outPath = inPath + ".wig"; String message = "The chemical reactivity file <br> &nbsp;&nbsp;" + fileName + "<br> needs to be converted to IGV chromosome <br>" + "coordinates and .wig format before loading. <br><br>Click <b>Continue</b> " + "to save converted file to <br> &nbsp;&nbsp;" + fileName + ".wig" + "<br>and load with the selected options, or <b>Cancel</b> to skip this<br>file."; ConvertOptions opts = ConvertFileDialog.showConvertFileDialog(message); if (opts.doConvert) { ShapeFileUtils.shapeToWigFile(inPath, outPath, opts.chrom, opts.strand, opts.start); loadWigFile(new ResourceLocator(outPath), newTracks, genome); } } /** * Convert various RNA structure formats to a more easily parseable format * in genomic coordinates, then load converted file. */ private void convertLoadStructureFile(ResourceLocator locator, List<Track> newTracks, Genome genome, String fileType) throws IOException { String inPath = locator.getPath(); String fileName = locator.getFileName(); String outPath = inPath + ".bp"; String message = "The RNA structure file <br> &nbsp;&nbsp;" + fileName + "<br> needs to be converted to IGV chromosome <br>" + "coordinates and .bp format before loading. 
<br><br>Click <b>Continue</b> " + "to save converted file to <br> &nbsp;&nbsp;" + fileName + ".bp" + "<br>and load with the selected options, or <b>Cancel</b> to skip this<br>file."; ConvertOptions opts = ConvertFileDialog.showConvertFileDialog(message); if (opts.doConvert) { if (fileType.equals("connectTable")) { BasePairFileUtils.connectTableToBasePairFile(inPath, outPath, opts.chrom, opts.strand, opts.start); } else if (fileType.equals("pairingProb")) { BasePairFileUtils.pairingProbToBasePairFile(inPath, outPath, opts.chrom, opts.strand, opts.start); } else if (fileType.equals("dotBracket")) { BasePairFileUtils.dotBracketToBasePairFile(inPath, outPath, opts.chrom, opts.strand, opts.start); } loadBasePairFile(new ResourceLocator(outPath), newTracks, genome); } } private void loadBasePairFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { String name = locator.getTrackName(); String path = locator.getPath(); String id = path + "_" + name; newTracks.add(new BasePairTrack(locator, id, name, genome)); } public static boolean isIndexed(ResourceLocator locator, Genome genome) { // Checking for the index is expensive over HTTP. First see if this is an indexable format by fetching the codec String fullPath = locator.getPath(); String pathNoQuery = locator.getURLPath(); if (!CodecFactory.hasCodec(locator, genome)) { return false; } String indexExtension = pathNoQuery.endsWith("gz") ? ".tbi" : ".idx"; String indexPath = fullPath + indexExtension; if (HttpUtils.isRemoteURL(fullPath)) { //Handle query string, if it exists String[] toks = fullPath.split("\\?", 2); if (toks.length == 2) { indexPath = String.format("%s%s?%s", toks[0], indexExtension, toks[1]); } } return FileUtils.resourceExists(indexPath); } public static TrackProperties getTrackProperties(Object header) { try { FeatureFileHeader ffHeader = (FeatureFileHeader) header; if (ffHeader != null) { return ffHeader.getTrackProperties(); } else { return null; } } catch (ClassCastException e) { return null; } } }
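// ---------------------------------------------------------------------------
// Usage sketch (not part of the original sources), assumed to sit alongside the
// loader class above (referred to here as TrackLoader, its usual name in IGV)
// and to share its imports. ResourceLocator's single-String constructor and
// GenomeManager.getInstance().getCurrentGenome() are taken from usages earlier
// in this file; everything else in the example is illustrative, not a statement
// about the real IGV API.
class TrackLoaderUsageSketch {

    static void describe(String path) {
        ResourceLocator locator = new ResourceLocator(path);
        Genome genome = GenomeManager.getInstance().getCurrentGenome();

        // Alignment formats (bam, cram, sam, index files, ...) are routed to loadAlignmentsTrack().
        System.out.println("alignment format? " + TrackLoader.isAlignmentTrack("bam"));

        // For tribble-style feature files, check whether a .tbi/.idx companion exists
        // before deciding how to load the resource.
        System.out.println(path + " indexed? " + TrackLoader.isIndexed(locator, genome));
    }
}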
import java.util.Scanner; class TicketLottery { public static void main(String[] args) { Scanner sc = new Scanner(System.in); System.out.println(getProbabilities(sc.nextLine())); } public static String getProbabilities(String input) { double[] numbers = splitNumbers(input); double lottery_people = numbers[0], winners = numbers[1], tickets_per_winner = numbers[2], group_size = numbers[3]; if (!enoughTickets(winners, tickets_per_winner, group_size)) return "0"; int winners_needed = (int)Math.ceil(group_size/tickets_per_winner); return ""; } private static double[] splitNumbers(String input) { String[] parts = input.split(" "); return new double[] { Integer.parseInt(parts[0]), Integer.parseInt(parts[1]), Integer.parseInt(parts[2]), Integer.parseInt(parts[3]) }; } // for the whole group private static boolean enoughTickets(double winners, double tickets_per_winner, double group_size) { return (winners*tickets_per_winner >= group_size); } }
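// ---------------------------------------------------------------------------
// Hedged completion sketch (not part of the original class). getProbabilities()
// above computes winners_needed but still returns "". One plausible reading of
// the input (N people enter, W winners are drawn, each winner may buy up to T
// tickets, and a group of S friends needs S tickets) gives the hypergeometric tail
//   P = sum over i = ceil(S/T) .. min(W, S) of C(S, i) * C(N - S, W - i) / C(N, W).
// That interpretation is an assumption about intent, not something the original
// code states.
class TicketLotteryProbabilitySketch {

    static double probability(int n, int w, int t, int s) {
        int needed = (int) Math.ceil((double) s / t);
        int most = Math.min(w, s);
        double p = 0.0;
        for (int i = needed; i <= most; i++) {
            if (w - i > n - s) {
                continue; // more non-group winners than non-group entrants: impossible draw
            }
            p += Math.exp(logChoose(s, i) + logChoose(n - s, w - i) - logChoose(n, w));
        }
        return p;
    }

    // log(C(n, k)) accumulated term by term to avoid overflowing factorials.
    static double logChoose(int n, int k) {
        double sum = 0.0;
        for (int i = 1; i <= k; i++) {
            sum += Math.log(n - k + i) - Math.log(i);
        }
        return sum;
    }

    public static void main(String[] args) {
        // 100 entrants, 10 winners, 2 tickets per winner, group of 3 -> needs at least 2 winning friends.
        System.out.println(probability(100, 10, 2, 3));
    }
}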
package monoxide.forgebackup; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Date; import java.util.List; import java.util.logging.Level; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.server.MinecraftServer; import net.minecraft.util.IProgressUpdate; import net.minecraft.world.MinecraftException; import net.minecraft.world.WorldServer; import net.minecraft.world.storage.ISaveHandler; import net.minecraft.world.storage.SaveHandler; import com.google.common.collect.Lists; public class CommandBackup extends CommandBackupBase { public CommandBackup(MinecraftServer server) { super(server); } @Override public String getCommandName() { return "backup"; } @Override public boolean canCommandSenderUseCommand(ICommandSender sender) { if (sender instanceof EntityPlayer) { if (!server.isDedicatedServer()) { return true; } } return super.canCommandSenderUseCommand(sender); } @Override public void processCommand(ICommandSender sender, String[] args) { boolean failure = false; notifyBackupAdmins(sender, "ForgeBackup.backup.start"); notifyBackupAdmins(sender, "ForgeBackup.save.disabled"); toggleSavability(false); try { notifyBackupAdmins(sender, "ForgeBackup.save.force"); forceSaveAllWorlds(); notifyBackupAdmins(sender, "ForgeBackup.backup.progress"); doBackup(sender); } catch (MinecraftException e) { notifyBackupAdmins(sender, Level.SEVERE, "ForgeBackup.backup.aborted"); BackupLog.log(Level.SEVERE, e, e.getMessage()); return; } catch (IOException e) { notifyBackupAdmins(sender, Level.SEVERE, "ForgeBackup.backup.aborted"); BackupLog.log(Level.SEVERE, e, e.getMessage()); return; } finally { notifyBackupAdmins(sender, "ForgeBackup.save.enabled"); toggleSavability(true); } notifyBackupAdmins(sender, "ForgeBackup.backup.complete"); } private void toggleSavability(boolean canSave) { for (int i = 0; i < server.worldServers.length; ++i) { if (server.worldServers[i] != null) { WorldServer worldServer = server.worldServers[i]; worldServer.canNotSave = !canSave; } } } private void forceSaveAllWorlds() throws MinecraftException { if (server.getConfigurationManager() != null) { server.getConfigurationManager().saveAllPlayerData(); } for (int i = 0; i < server.worldServers.length; ++i) { if (server.worldServers[i] != null) { WorldServer var5 = server.worldServers[i]; boolean var6 = var5.canNotSave; var5.canNotSave = false; var5.saveAllChunks(true, (IProgressUpdate)null); var5.canNotSave = var6; } } } private void doBackup(ICommandSender sender) throws IOException { ISaveHandler saveHandler = server.worldServers[0].getSaveHandler(); File backupsFolder = new File(getBackupFolder(), saveHandler.getSaveDirectoryName()); if (backupsFolder.exists() && !backupsFolder.isDirectory()) { notifyBackupAdmins(sender, Level.WARNING, "ForgeBackup.backup.folderExists"); return; } else if (!backupsFolder.exists()) { backupsFolder.mkdirs(); } List<File> thingsToSave = Lists.newArrayList(); if (ForgeBackup.instance().config().willBackupWorld()) { if (saveHandler instanceof SaveHandler) { thingsToSave.add(((SaveHandler)saveHandler).getSaveDirectory()); } else { thingsToSave.add(server.getFile(saveHandler.getSaveDirectoryName())); } } if (ForgeBackup.instance().config().willBackupConfiguration()) { thingsToSave.add(server.getFile("config")); } if 
(ForgeBackup.instance().config().willBackupMods()) { thingsToSave.add(server.getFile("mods")); thingsToSave.add(server.getFile("coremods")); } if (ForgeBackup.instance().config().willBackupServerConfiguration()) { thingsToSave.add(server.getFile("banned-ips.txt")); thingsToSave.add(server.getFile("banned-players.txt")); thingsToSave.add(server.getFile("ops.txt")); thingsToSave.add(server.getFile("server.properties")); thingsToSave.add(server.getFile("white-list.txt")); } File backupFile = new File(backupsFolder, getBackupFileName()); createNewBackup(backupFile, thingsToSave); } private void createNewBackup(File backupFile, List<File> toBackup) throws IOException { ZipOutputStream backup = new ZipOutputStream(new FileOutputStream(backupFile)); byte[] buffer = new byte[4096]; int readBytes; while (!toBackup.isEmpty()) { File current = toBackup.remove(0); if (!current.exists()) { continue; } if (current.isDirectory()) { for (File child : current.listFiles()) { toBackup.add(child); } } else { backup.putNextEntry(new ZipEntry(cleanZipPath(current.getCanonicalPath()))); try { InputStream currentStream = new FileInputStream(current); while ((readBytes = currentStream.read(buffer)) >= 0) { backup.write(buffer, 0, readBytes); } currentStream.close(); } catch (IOException e) { BackupLog.warning("Couldn't backup file: %s", current.getPath()); } backup.closeEntry(); } } backup.close(); } private File getBackupFolder() { String backupFolder = ForgeBackup.instance().config().getBackupFolderName(); File absoluteFile = new File(backupFolder); return absoluteFile.getAbsolutePath().equals(backupFolder) ? absoluteFile : server.getFile(backupFolder); } private String getBackupFileName() { Date now = new Date(); return String.format("%TY%Tm%Td-%TH%TM%TS.zip", now, now, now, now, now, now); } private String cleanZipPath(String path) throws IOException { String dataDirectory = server.getFile(".").getCanonicalPath(); if (path.substring(0, dataDirectory.length()).equals(dataDirectory)) { return path.substring(dataDirectory.length()+1); } return path; } }
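// ---------------------------------------------------------------------------
// Standalone sketch (not part of the original mod) of the same work-list zipping
// technique used by createNewBackup above: directories are expanded onto the
// list, regular files are streamed into the archive under a path made relative
// to the root, as cleanZipPath() does. Class, package and path names here are
// illustrative only.
package monoxide.forgebackup.examples;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class WorkListZipSketch {

    public static void zip(File root, File zipFile) throws IOException {
        String base = root.getCanonicalPath();
        List<File> work = new ArrayList<File>();
        work.add(root);
        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipFile));
        try {
            byte[] buffer = new byte[4096];
            while (!work.isEmpty()) {
                File current = work.remove(0);
                if (current.isDirectory()) {
                    File[] children = current.listFiles();
                    if (children != null) {
                        for (File child : children) {
                            work.add(child);
                        }
                    }
                } else if (current.isFile()) {
                    // Store the entry relative to the root directory.
                    String entryName = current.getCanonicalPath().substring(base.length() + 1);
                    out.putNextEntry(new ZipEntry(entryName));
                    InputStream in = new FileInputStream(current);
                    try {
                        int read;
                        while ((read = in.read(buffer)) >= 0) {
                            out.write(buffer, 0, read);
                        }
                    } finally {
                        in.close();
                    }
                    out.closeEntry();
                }
            }
        } finally {
            out.close();
        }
    }

    public static void main(String[] args) throws IOException {
        zip(new File(args[0]), new File(args[1]));
    }
}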
package uk.ac.ebi.spot.goci.service; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import uk.ac.ebi.spot.goci.model.CatalogSummaryView; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.List; @Service public class ProcessView { private NCBICatalogService ncbiCatalogService; @Autowired public ProcessView(NCBICatalogService ncbiCatalogService) { this.ncbiCatalogService = ncbiCatalogService; } public List<String> serialiseViews() { Collection<CatalogSummaryView> views = ncbiCatalogService.getCatalogSummaryViewsWithStatusSendToNcbi(); List<String> serialisedViews = new ArrayList<String>(); // For each view create a line from the data returned for (CatalogSummaryView view : views) { String line = ""; // Format dates DateFormat df = new SimpleDateFormat("dd-MMM-yyyy"); // As a default set all strings to empty string with tab // Also trim strings as the database contains newlines/tabs etc. String dateAddedToCatalog = "" + "\t"; if (view.getCatalogAddedDate() != null) { dateAddedToCatalog = df.format(view.getCatalogAddedDate()) + "\t"; } String pubmedId = "" + "\t"; if (view.getPubmedId() != null) {pubmedId = view.getPubmedId().trim() + "\t";} String firstAuthor = "" + "\t"; if (view.getAuthor() != null) {firstAuthor = view.getAuthor().trim() + "\t";} String date = "" + "\t"; if (view.getPublicationDate() != null) { date = df.format(view.getPublicationDate()) + "\t"; } String journal = "" + "\t"; if (view.getJournal() != null) { journal = view.getJournal().trim() + "\t"; } String link = "" + "\t"; if (view.getLink() != null) {link = view.getLink().trim() + "\t";} String study = "" + "\t"; if (view.getStudy() != null) { study = view.getStudy().trim() + "\t";} String diseaseTrait = "" + "\t"; if (view.getDiseaseTrait() != null) { diseaseTrait = view.getDiseaseTrait().trim() + "\t";} String initialSampleSize = "" + "\t"; if (view.getInitialSampleDescription() != null) { initialSampleSize = view.getInitialSampleDescription().trim() + "\t"; } String replicateSampleSize = "" + "\t"; if (view.getReplicateSampleDescription() != null) { replicateSampleSize = view.getReplicateSampleDescription().trim() + "\t"; } String region = "" + "\t"; if (view.getRegion() != null) {region = view.getRegion().trim() + "\t";} String reportedGenes = "" + "\t"; if (view.getReportedGene() != null) { reportedGenes = view.getReportedGene().trim() + "\t"; } String strongestSnpRiskAllele = "" + "\t"; if (view.getStrongestSnpRiskAllele() != null) { strongestSnpRiskAllele = view.getStrongestSnpRiskAllele().trim() + "\t"; } String snps = "" + "\t"; if (view.getSnpRsid() != null) {snps = view.getSnpRsid().trim() + "\t";} String riskAlleleFrequency = "" + "\t"; if (view.getRiskAlleleFrequency() != null) { riskAlleleFrequency = view.getRiskAlleleFrequency().trim() + "\t"; } String pValue = "" + "\t"; if (view.getpValueMantissa() != null && view.getpValueExponent() != null) { pValue = view.getpValueMantissa().toString() + "E" + view.getpValueExponent().toString() + "\t"; } String pValueText = "" + "\t"; if (view.getpValueQualifier() != null) {pValueText = view.getpValueQualifier().trim() + "\t";} String orBeta = "" + "\t"; if (view.getOrBeta() != null) { orBeta = view.getOrBeta().toString().trim() + "\t";} String ciText = "" + "\t"; if (view.getCi() != null && view.getCiQualifier() != null) {
ciText = view.getCi() + " " + view.getCiQualifier() + "\t"; } else if (view.getCi() != null) { ciText = view.getCi() + "\t";} else if (view.getCiQualifier() != null) {ciText = view.getCiQualifier() + "\t";} String platform = "" + "\t"; if (view.getPlatform() != null) { platform = view.getPlatform().trim() + "\t";} String cnv = ""; if (view.getCnv() == true) { cnv = "Y" + "\t"; } else {cnv = "N" + "\t";} String associationId = "" + "\t"; if (view.getAssociationId() != null) { associationId = view.getAssociationId().toString() + "\t";} String studyId = "" + "\t"; if (view.getStudyId() != null) { studyId = view.getStudyId().toString() + "\t";} String resultPublished = ""; if (view.getResultPublished() != null) { resultPublished = "Y" + "\t"; } else {resultPublished = "N" + "\t";} line = dateAddedToCatalog + pubmedId + firstAuthor + date + journal + link + study + diseaseTrait + initialSampleSize + replicateSampleSize + region + reportedGenes + strongestSnpRiskAllele + snps + riskAlleleFrequency + pValue + pValueText + orBeta + ciText + platform + cnv + associationId + studyId + resultPublished + "\n"; serialisedViews.add(line); } // end for return serialisedViews; } // For each line public void createFileForNcbi(String fileName, List<String> serialisedViews) { // Create a file from the file name supplied File file = new File(fileName); BufferedWriter output = null; try { output = new BufferedWriter(new FileWriter(file)); } catch (IOException e) { e.printStackTrace(); } // Create file header line String header = ""; header = "DATE ADDED TO CATALOG" + "\t" + "PUBMEDID" + "\t" + "FIRST AUTHOR" + "\t" + "DATE" + "\t" + "JOURNAL" + "\t" + "LINK" + "\t" + "STUDY" + "\t" + "DISEASE/TRAIT" + "\t" + "INITIAL SAMPLE SIZE" + "\t" + "REPLICATION SAMPLE SIZE" + "\t" + "REGION" + "\t" + "REPORTED GENE(S)" + "\t" + "STRONGEST SNP-RISK ALLELE" + "\t" + "SNPS" + "\t" + "RISK ALLELE FREQUENCY" + "\t" + "P-VALUE" + "\t" + "P-VALUE (TEXT)" + "\t" + "OR OR BETA" + "\t" + "95% CI (TEXT)" + "\t" + "PLATFORM [SNPS PASSING QC]" + "\t" + "CNV" + "\t" + "GWASTUDIESSNPID" + "\t" + "GWASTUDYID" + "\t" + "RESULTPUBLISHED" + "\n"; try { output.write(header); } catch (IOException e) { e.printStackTrace(); } // Write each line for (String view : serialisedViews) { try { output.write(view); } catch (IOException e) { e.printStackTrace(); } } try { output.close(); } catch (IOException e) { e.printStackTrace(); } } }
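// ---------------------------------------------------------------------------
// Sketch (not part of the original service) of a try-with-resources variant of
// createFileForNcbi above: the same idea -- a header line followed by the
// serialised view lines -- but the writer is closed even when a write fails,
// instead of wrapping every write in its own try/catch. Class and method names
// are illustrative only.
package uk.ac.ebi.spot.goci.service.examples;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;

public class NcbiFileWriterSketch {

    public static void write(String fileName, String header, List<String> serialisedViews) throws IOException {
        try (BufferedWriter output = new BufferedWriter(new FileWriter(fileName))) {
            output.write(header);
            for (String line : serialisedViews) {
                output.write(line);
            }
        } // writer is flushed and closed here, even if an exception is thrown
    }
}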
package moonlightowl.openblocks; import javafx.application.Application; import javafx.application.Platform; import javafx.fxml.FXMLLoader; import javafx.scene.Scene; import javafx.scene.control.*; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.input.KeyCode; import javafx.scene.input.MouseButton; import javafx.scene.input.MouseEvent; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.GridPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Priority; import javafx.stage.FileChooser; import javafx.stage.Stage; import javafx.stage.WindowEvent; import moonlightowl.openblocks.io.JSON; import moonlightowl.openblocks.structure.Block; import moonlightowl.openblocks.structure.Joint; import moonlightowl.openblocks.structure.Wire; import moonlightowl.openblocks.ui.About; import moonlightowl.openblocks.ui.ToolButton; import moonlightowl.openblocks.ui.ToolPane; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import java.io.*; import java.util.Optional; public class OpenBlocks extends Application { private Stage parentStage; // FXML links public AnchorPane rootPane; public MenuBar menuBar; public HBox toolBar; public ScrollPane scroller; // Custom elements private ToolPane[] tools; private About about; private Workspace workspace; private boolean selectedTrash = false; private Blocks.Id selected; private ImageView selectedIcon; private Wire wire; private File projectFile; private boolean changed = false; @Override public void start(Stage primaryStage) throws Exception { parentStage = primaryStage; FXMLLoader loader = new FXMLLoader(getClass().getResource("/main.fxml")); loader.setController(this); loader.load(); // Resources Assets.load(); // Generate GUI initUI(); // Create IDE window setTitle(Settings.UNTITLED); primaryStage.setScene(new Scene(rootPane, Settings.WIDTH, Settings.HEIGHT)); primaryStage.setMinWidth(500); primaryStage.setMinHeight(400); primaryStage.show(); // Handle exit primaryStage.setOnCloseRequest(event -> { if(changed) { Alert alert = new Alert(Alert.AlertType.CONFIRMATION); alert.setTitle("Завершение работы"); alert.setHeaderText("Сохранение изменений"); alert.setContentText("В проект были внесены изменения. 
Сохранить?"); ButtonType buttonSave = new ButtonType("Сохранить"); ButtonType buttonDiscard = new ButtonType("Не сохранять"); ButtonType buttonCancel = new ButtonType("Отмена", ButtonBar.ButtonData.CANCEL_CLOSE); alert.getButtonTypes().setAll(buttonSave, buttonDiscard, buttonCancel); Optional<ButtonType> result = alert.showAndWait(); if (result.get() == buttonSave){ saveProject(); } else if (result.get() == buttonCancel) { event.consume(); } } }); } public static void main(String[] args) { launch(args); } public void setTitle(String title){ if(title != null) parentStage.setTitle(title + " - OpenBlocks " + Settings.VERSION); else parentStage.setTitle("OpenBlocks " + Settings.VERSION); changed = false; } public void projectChanged() { if(!changed){ parentStage.setTitle("*"+parentStage.getTitle()); changed = true; } } public void initUI(){ about = new About(parentStage); workspace = new Workspace(scroller); selectedIcon = new ImageView(); selectedIcon.setScaleX(0.3); selectedIcon.setScaleY(0.3); rootPane.getChildren().add(selectedIcon); initToolsPanels(); initToolBar(); /** Event listenters */ // Wire operations Joint.setOnClickListenter(event ->{ Joint joint = (Joint)event.getSource(); if(event.getButton() == MouseButton.PRIMARY) { // Replace existing one if(joint.isAttached()){ Wire old = joint.getWire(); joint.attachWire(wire); wire = old; // Or create / attach new wire } else { if (wire == null) { wire = new Wire(); joint.attachWire(wire); wire.reposition(joint.getAbsX(), joint.getAbsY()); workspace.addWire(wire); } else { joint.attachWire(wire); wire = null; } } projectChanged(); } }); // Block removement Block.setOnClickListenter(event -> { if (selectedTrash) { Block block = (Block)event.getSource(); workspace.removeBlock(block); projectChanged(); } }); // Wire removement Wire.setOnClickListenter(event -> { if (selectedTrash) { Wire wire = (Wire)event.getSource(); workspace.removeWire(wire); projectChanged(); } }); // Add new objects to workspace rootPane.setOnMouseClicked(event -> { if(hasOpenedPanes()) closeAllToolPanes(); else if(event.getButton() == MouseButton.PRIMARY) { if(selected != null) { Block block = selected.getInstance() .setPosition(workspace.projectX(event.getX()), workspace.projectY(event.getY())); workspace.addBlock(block); projectChanged(); } } else if(event.getButton() == MouseButton.SECONDARY) { // Deselect current block type deselect(); // Remove current uncomplete wire if(wire != null){ workspace.removeWire(wire); wire = null; } } }); // Move mouse tool icon & current wire loose end (if any) rootPane.setOnMouseMoved(event -> { selectedIcon.setTranslateX(event.getSceneX()); selectedIcon.setTranslateY(event.getSceneY()); if(wire != null) wire.reposition(workspace.projectX(event.getX()), workspace.projectY(event.getY())); }); rootPane.addEventFilter(MouseEvent.MOUSE_DRAGGED, rootPane.getOnMouseMoved()); // Keyboard tool selector rootPane.setOnKeyPressed(event -> { if(event.getCode() == KeyCode.DIGIT1) toggleToolPane(0); if(event.getCode() == KeyCode.DIGIT2) toggleToolPane(1); if(event.getCode() == KeyCode.DIGIT3) toggleToolPane(2); if(event.getCode() == KeyCode.DIGIT4) toggleToolPane(3); }); } /** UI generation */ private void initToolsPanels(){ tools = new ToolPane[] { new ToolPane("Точки входа"), new ToolPane("Действия"), new ToolPane("Циклы"), new ToolPane("Логика") }; for(Blocks.Id id: Blocks.Id.values()){ ToolButton tool = new ToolButton(id.name, Assets.toolIcons[id.id]); tool.setOnMouseClicked(event -> { select(id); closeAllToolPanes(); }); 
tools[id.category.ordinal()].add(tool); } for(ToolPane pane: tools) rootPane.getChildren().add(pane); } private void initToolBar(){ for(int c = 0; c < 4; c++) { Button button = newToolBarButton(Assets.toolBarIcon[c+1]); int id = c; button.setOnAction(event -> toggleToolPane(id)); toolBar.getChildren().add(button); } Button trash = newToolBarButton(Assets.toolBarIcon[5]); trash.setOnAction(event -> selectTrashTool()); toolBar.getChildren().add(trash); } private Button newToolBarButton(Image image){ Button button = new Button(); button.setId("tool"); button.setGraphic(new ImageView(image)); return button; } /** Tool actions */ public void selectTrashTool(){ deselect(); selectedTrash = true; selectedIcon.setImage(Assets.toolBarIcon[5]); } public void select(Blocks.Id id){ deselect(); selected = id; selectedIcon.setImage(Assets.toolIcons[id.id]); } public void deselect(){ selected = null; selectedTrash = false; selectedIcon.setImage(null); } /** Block panel actions */ public void closeAllToolPanes(){ toggleToolPane(-1); } public void toggleToolPane(int id){ for(int c = 0; c < tools.length; c++) if(c == id) tools[c].toggle(); else tools[c].close(); } public boolean hasOpenedPanes(){ for(ToolPane pane: tools) if(pane.isOpen()) return true; return false; } /** Project file management */ private void save() { String data = JSON.generate(workspace).toJSONString(); try (FileWriter writer = new FileWriter(projectFile)) { writer.write(data); Log.out("Successfully Copied JSON Object to File..."); Log.out("JSON Object: " + data); } catch (IOException e) { Log.error("Project saving error", e); error("Ошибка записи проекта", "В силу неведомых причин, сериализация проекта в JSON прошла неудачно.\n" + "Проверьте, есть ли свободное место на диске, и имеет ли программа права на запись" + "в выбранном каталоге.", e); } } private void load() { JSONParser parser = new JSONParser(); try (FileReader reader = new FileReader(projectFile)) { JSONObject data = (JSONObject) parser.parse(reader); JSON.recreate(workspace, data); } catch (Exception e) { Log.error("Error loading project", e); error("Ошибка открытия проекта", "Вероятнее всего, файл, который вы пытаетесь открыть - поврежден.", e); } } /** Menu actions */ public void newProject() { workspace.clear(); projectFile = null; setTitle(Settings.UNTITLED); } public void openProject(){ FileChooser fileChooser = new FileChooser(); fileChooser.setTitle("Открыть..."); fileChooser.setSelectedExtensionFilter( new FileChooser.ExtensionFilter("Проект OcBlocks", "*."+Settings.EXTENSION)); projectFile = fileChooser.showOpenDialog(parentStage); load(); setTitle(projectFile.getName()); } public void saveProject() { if(projectFile == null) saveProjectAs(); else save(); } public void saveProjectAs() { FileChooser fileChooser = new FileChooser(); fileChooser.setTitle("Сохранить как..."); fileChooser.setInitialFileName(projectFile == null ? Settings.UNTITLED + "." + Settings.EXTENSION : projectFile.getName()); fileChooser.setSelectedExtensionFilter( new FileChooser.ExtensionFilter("Проект OcBlocks", "*."+Settings.EXTENSION)); projectFile = fileChooser.showSaveDialog(parentStage); save(); setTitle(projectFile.getName()); } public void showAboutWindow() { about.show(); } public void exit() { parentStage.fireEvent(new WindowEvent(parentStage, WindowEvent.WINDOW_CLOSE_REQUEST)); } /** Messages */ public void error(String title, String message, Exception e) { Alert alert = new Alert(Alert.AlertType.ERROR); alert.setTitle(title); alert.setHeaderText("Упс! 
Что-то пошло не так..."); alert.setContentText(message); if(e != null) { // Create expandable Exception. StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); String exceptionText = sw.toString(); Label label = new Label("Стектрейс (отправьте разработчику):"); TextArea textArea = new TextArea(exceptionText); textArea.setEditable(false); textArea.setWrapText(true); textArea.setMaxWidth(Double.MAX_VALUE); textArea.setMaxHeight(Double.MAX_VALUE); GridPane.setVgrow(textArea, Priority.ALWAYS); GridPane.setHgrow(textArea, Priority.ALWAYS); GridPane expContent = new GridPane(); expContent.setMaxWidth(Double.MAX_VALUE); expContent.add(label, 0, 0); expContent.add(textArea, 0, 1); // Set expandable Exception into the dialog pane. alert.getDialogPane().setExpandableContent(expContent); } alert.showAndWait(); } }
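// ---------------------------------------------------------------------------
// Sketch (not part of the original application) of the usual JavaFX pattern for
// project-file dialogs. FileChooser.setSelectedExtensionFilter() only selects a
// filter that has already been registered through getExtensionFilters(); calling
// it on its own, as openProject()/saveProjectAs() above do, typically leaves the
// dialog unfiltered. Names and the extension value here are illustrative only.
package moonlightowl.openblocks.examples;

import java.io.File;
import javafx.stage.FileChooser;
import javafx.stage.Window;

public class ProjectFileChooserSketch {

    public static File chooseProject(Window owner, String extension) {
        FileChooser chooser = new FileChooser();
        chooser.setTitle("Open project");
        FileChooser.ExtensionFilter filter =
                new FileChooser.ExtensionFilter("OpenBlocks project", "*." + extension);
        chooser.getExtensionFilters().add(filter);  // register the filter first
        chooser.setSelectedExtensionFilter(filter); // then make it the default selection
        return chooser.showOpenDialog(owner);       // may return null if the user cancels
    }
}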
package com.oracle.truffle.api.source; import java.io.*; import java.util.*; import com.oracle.truffle.api.*; /** * A representation of source code information, suitable for hash table keys with equality defined * in terms of content. There are three kinds of sources supported at present. * <ul> * <li><strong>File:</strong> Each file is represented as a canonical object, indexed by the * absolute, canonical path name of the file. The textual contents of the file may be supplied when * the object is created, or it may be read lazily. Only one lazy attempt will be made to read a * file, and failure will result silently in null content.</li> * <li><strong>Literal Source:</strong> A named text string, whose contents are supplied concretely * (possibly via an {@link InputStream}), can also be used as a source. These are represented as * value objects whose equality depends on both name and contents.</li> * <li><strong>Fake Files:</strong> A named text string used for testing; its contents can be * retrieved by name, unlike literal sources.</li> * </ul> * <p> * <strong>Cache:</strong> * <ol> * <li>Access to source file contents via {@link Source#getInputStream()} or * {@link Source#getReader()} does <em>not</em> by itself result in the file's contents being cached * in the {@link Source} object.</li> * <li>Access to source file contents via {@link Source#getCode()} or any other {@link Source} * methods related to file's contents <em>will</em> result in the contents being cached in the * {@link Source} object.</li> * <li>Once source file contents have been cached, access to source file contents via * {@link Source#getInputStream()} or {@link Source#getReader()} will be provided from the cache.</li> * <li>Any access to source file contents via the cache will result in a timestamp check and * possible cache reload.</li> * </ol> */ public final class SourceManager { // Only files and fake files are indexed. private final Map<String, SourceImpl> pathToSource = new HashMap<>(); public SourceManager() { } /** * Gets the canonical representation of a source file, whose contents will be read lazily and * then cached. * * @param reset forces any existing {@link Source} cache to be cleared, forcing a re-read */ public Source get(String fileName, boolean reset) { SourceImpl source = pathToSource.get(fileName); if (source == null) { final File file = new File(fileName); String path = null; if (file.exists()) { try { path = file.getCanonicalPath(); } catch (IOException e) { throw new RuntimeException("Can't find file " + fileName); } } source = pathToSource.get(path); if (source == null) { source = new FileSourceImpl(file, fileName, path); pathToSource.put(path, source); } } if (reset) { source.reset(); } return source; } /** * Gets the canonical representation of a source file, whose contents will be read lazily and * then cached. */ public Source get(String fileName) { return get(fileName, false); } /** * Creates a source from literal text. */ @SuppressWarnings("static-method") public Source get(String name, String code) { assert code != null; return new LiteralSourceImpl(name, code); } /** * Creates a source whose contents will be read immediately and cached. 
*/ @SuppressWarnings("static-method") public Source get(String name, InputStream stream) throws IOException { InputStreamReader reader = new InputStreamReader(stream); return new LiteralSourceImpl(name, readCode(reader)); } /** * Creates a source from literal text, but which acts as a file and can be retrieved by name * (unlike other literal sources); intended for testing. */ public Source getFakeFile(String name, String code) { final SourceImpl source = new LiteralSourceImpl(name, code); pathToSource.put(name, source); return source; } private static String readCode(Reader reader) throws IOException { final StringBuilder builder = new StringBuilder(); final char[] buffer = new char[1024]; while (true) { final int n = reader.read(buffer); if (n == -1) { break; } builder.append(buffer, 0, n); } return builder.toString(); } private abstract static class SourceImpl implements Source { protected TextMap textMap = null; protected abstract void reset(); public final InputStream getInputStream() { return new ByteArrayInputStream(getCode().getBytes()); } /** * Gets the text (not including a possible terminating newline) in a (1-based) numbered * line. */ public final String getCode(int lineNumber) { checkTextMap(); final int offset = textMap.lineStartOffset(lineNumber); final int length = textMap.lineLength(lineNumber); return getCode().substring(offset, offset + length); } /** * The number of text lines in the source. */ public final int getLineCount() { return checkTextMap().lineCount(); } /** * The 1-based number of the line that includes a 0-based character offset. */ public final int getLineNumber(int offset) { return checkTextMap().offsetToLine(offset); } /** * The 0-based character offset at the start of a (1-based) numbered line. */ public final int getLineStartOffset(int lineNumber) { return checkTextMap().lineStartOffset(lineNumber); } /** * The number of characters (not counting a possible terminating newline) in a (1-based) * numbered line. */ public final int getLineLength(int lineNumber) { return checkTextMap().lineLength(lineNumber); } private TextMap checkTextMap() { if (textMap == null) { final String code = getCode(); if (code == null) { throw new RuntimeException("can't read file " + getName()); } textMap = new TextMap(code); } return textMap; } } public static class LiteralSourceImpl extends SourceImpl { private final String name; // Name used originally to describe the source private final String code; public LiteralSourceImpl(String name, String code) { this.name = name; this.code = code; } @Override public String getName() { return name; } @Override public String getCode() { return code; } @Override public String getPath() { return name; } @Override public Reader getReader() { return new StringReader(code); } @Override protected void reset() { } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + name.hashCode(); result = prime * result + (code == null ? 
0 : code.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof LiteralSourceImpl)) { return false; } LiteralSourceImpl other = (LiteralSourceImpl) obj; return name.equals(other.name) && code.equals(other.code); } } private static class FileSourceImpl extends SourceImpl { private final File file; private final String name; // Name used originally to describe the source private final String path; // Normalized path description of an actual file private String code = null; // A cache of the file's contents private long timeStamp; // timestamp of the cache in the file system public FileSourceImpl(File file, String name, String path) { this.file = file; this.name = name; this.path = path; } @Override public String getName() { return name; } @Override public String getCode() { if (code == null || timeStamp != file.lastModified()) { try { code = readCode(getReader()); timeStamp = file.lastModified(); } catch (IOException e) { } } return code; } @Override public String getPath() { return path; } @Override public Reader getReader() { if (code != null && timeStamp == file.lastModified()) { return new StringReader(code); } try { return new FileReader(file); } catch (FileNotFoundException e) { throw new RuntimeException("Can't find file " + path); } } @Override protected void reset() { this.code = null; } } }
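// ---------------------------------------------------------------------------
// Usage sketch (not part of the original API sources). Exercises the literal and
// "fake file" source kinds defined above. It assumes the Source interface (not
// shown in this file) exposes getName() plus the line-query methods implemented
// by SourceImpl (getLineCount, getCode(int)); that is stated here as an
// assumption rather than fact.
class SourceManagerUsageSketch {

    static void demo() {
        SourceManager sources = new SourceManager();

        // A literal source: equality is defined by name and contents.
        Source literal = sources.get("example", "first line\nsecond line\n");
        System.out.println(literal.getLineCount() + " lines, line 1 = " + literal.getCode(1));

        // A fake file behaves like a literal source but is indexed by name,
        // so a later get("test.txt") returns the same cached object.
        sources.getFakeFile("test.txt", "fake contents\n");
        Source again = sources.get("test.txt");
        System.out.println("retrieved by name: " + again.getName());
    }
}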
package org.getcomposer.core; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.google.gson.Gson; import com.google.gson.GsonBuilder; /** * Represents a composer package. The source can either be a composer.json file * or a json response from packagist.org. * * See fromJson / fromPackagist for details. * * @author Robert Gruendler <r.gruendler@gmail.com> * */ public class PHPPackage extends ObservableModel implements PackageInterface { public String name; public String type; public String description; public String homepage; public String url; public String fullPath; public String minimumStability; public Map<String, String> require; public Map<String, String> requireDev; public Autoload autoload; public String targetDir; public String version; public String versionNormalized; public License license; public String[] keywords; public Map<String, PHPPackage> versions; public ArrayList<Author> authors; public PHPPackage() { authors = new ArrayList<Author>(); require = new HashMap<String, String>(); requireDev = new HashMap<String, String>(); versions = new HashMap<String, PHPPackage>(); } public String toString() { return name; } /** * Deserializes a package from a composer.json file * * @param input * @return {@link PHPPackage} the deserialized package * @throws FileNotFoundException */ public static PHPPackage fromJson(File input) throws FileNotFoundException { Gson gson = getBuilder(); InputStream stream = new FileInputStream(input); InputStreamReader reader = new InputStreamReader(stream); PHPPackage pHPPackage = gson.fromJson(reader, PHPPackage.class); pHPPackage.fullPath = input.getAbsolutePath(); return pHPPackage; } public static PHPPackage fromPackagist(File input) throws FileNotFoundException { Gson gson = getBuilder(); InputStream stream = new FileInputStream(input); InputStreamReader reader = new InputStreamReader(stream); PackagistPackage packagistPackage = gson.fromJson(reader, PackagistPackage.class); return packagistPackage.phpPackage; } /** * Retrieve a Gson with the proper TypeAdapters and FieldNamingStrategy * * @return {@link Gson} */ public static Gson getBuilder() { return new GsonBuilder() .registerTypeAdapter(License.class, new LicenseDeserializer()) .setFieldNamingStrategy(new ComposerFieldNamingStrategy()) .create(); } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getDefaultVersion() */ public String getDefaultVersion() { return versions.keySet().iterator().next(); } /* * (non-Javadoc) * * @see * org.getcomposer.core.PackageInterface#getPackageName(java.lang.String) */ public String getPackageName(String version) throws Exception { if (!versions.containsKey(version)) { throw new Exception("Invalid version " + version + " for package " + name); } return String.format("%s:%s", name, version); } /** * * Helper class for deserializing a packagist.org json object. 
* * @author Robert Gruendler <r.gruendler@gmail.com> * */ public class PackagistPackage { public PHPPackage phpPackage; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getName() */ public String getName() { return name; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getType() */ public String getType() { return type; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getDescription() */ public String getDescription() { return description; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getHomepage() */ public String getHomepage() { return homepage; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getUrl() */ public String getUrl() { return url; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getFullPath() */ public String getFullPath() { return fullPath; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getRequire() */ public Map<String, String> getRequire() { return require; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getRequireDev() */ public Map<String, String> getRequireDev() { return requireDev; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getAutoload() */ public Autoload getAutoload() { return autoload; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getTargetDir() */ public String getTargetDir() { return targetDir; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getVersion() */ public String getVersion() { return version; } public String getVersionNormalized() { return versionNormalized; } public License getLicense() { return license; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getKeywords() */ public String[] getKeywords() { return keywords; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getVersions() */ public Map<String, PHPPackage> getVersions() { return versions; } /* * (non-Javadoc) * * @see org.getcomposer.core.PackageInterface#getAuthors() */ public List<Author> getAuthors() { return authors; } public String getMinimumStability() { return minimumStability; } @SuppressWarnings("unchecked") public void addAuthor(Author author) { ArrayList<Author> authors = (ArrayList<Author>) this.authors.clone(); this.authors.add(author); firePropertyChange("authors", authors, this.authors); } @SuppressWarnings("unchecked") public void removeAuthor(Author author) { ArrayList<Author> authors = (ArrayList<Author>) this.authors.clone(); this.authors.remove(author); firePropertyChange("authors", authors, this.authors); } public void setAuthors(List<Author> authors) { firePropertyChange("authors", this.authors, this.authors = (ArrayList<Author>)authors); } /** * @param name the name to set */ public void setName(String name) { firePropertyChange("name", this.name, this.name = name); } /** * @param type the type to set */ public void setType(String type) { firePropertyChange("type", this.type, this.type = type); } /** * @param description the description to set */ public void setDescription(String description) { firePropertyChange("description", this.description, this.description = description); } /** * @param homepage the homepage to set */ public void setHomepage(String homepage) { firePropertyChange("homepage", this.homepage, this.homepage = homepage); } /** * @param url the url to set */ public void setUrl(String url) { firePropertyChange("url", this.url, this.url = url); } /** * @param minimumStability the minimumStability to set 
*/ public void setMinimumStability(String minimumStability) { firePropertyChange("minimumStability", this.minimumStability, this.minimumStability = minimumStability); } /** * @param require the require to set */ public void setRequire(Map<String, String> require) { firePropertyChange("require", this.require, this.require = require); } public void addRequire(String phpPackage, String version) { Map<String, String> oldRequire = new HashMap <String, String>(require); require.put(phpPackage, version); firePropertyChange("require", oldRequire, require); } public void removeRequire(String phpPackage) { Map<String, String> oldRequire = new HashMap <String, String>(require); require.remove(phpPackage); firePropertyChange("require", oldRequire, require); } /** * @param requireDev the requireDev to set */ public void setRequireDev(Map<String, String> requireDev) { firePropertyChange("requireDev", this.requireDev, this.requireDev = requireDev); } public void addRequireDev(String phpPackage, String version) { Map<String, String> oldRequireDev = new HashMap <String, String>(requireDev); requireDev.put(phpPackage, version); firePropertyChange("requireDev", oldRequireDev, requireDev); } public void removeRequireDev(String phpPackage) { Map<String, String> oldRequireDev = new HashMap <String, String>(requireDev); requireDev.remove(phpPackage); firePropertyChange("requireDev", oldRequireDev, requireDev); } /** * @param targetDir the targetDir to set */ public void setTargetDir(String targetDir) { firePropertyChange("targetDir", this.targetDir, this.targetDir = targetDir); } /** * @param keywords the keywords to set */ public void setKeywords(String[] keywords) { firePropertyChange("keywords", this.keywords, this.keywords = keywords); } }
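/*
 * Hedged usage sketch for the PHPPackage model above: it only shows how the fromJson()
 * factory and a few of the public getters might be called. The demo class, its main
 * method, and the composer.json path are illustrative assumptions and are not part of
 * the original sources.
 */
package org.getcomposer.core;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Map;

public class PHPPackageUsageSketch {
    public static void main(String[] args) throws FileNotFoundException {
        // Deserialize a composer.json file into the model (the path is a placeholder).
        PHPPackage pkg = PHPPackage.fromJson(new File("composer.json"));

        System.out.println("name: " + pkg.getName());
        System.out.println("description: " + pkg.getDescription());

        // The "require" map holds dependency -> version constraint pairs.
        for (Map.Entry<String, String> dep : pkg.getRequire().entrySet()) {
            System.out.println("requires " + dep.getKey() + " " + dep.getValue());
        }
    }
}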
package org.ccci.gto.android.common.db; import org.junit.Test; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; public class BaseContractTest { @Test(expected = IllegalArgumentException.class) public void verifyUniqueIndexNoFields() throws Exception { BaseContract.uniqueIndex(); } @Test public void verifyUniqueIndex() throws Exception { assertThat(BaseContract.uniqueIndex("field1").trim(), is("UNIQUE(field1)")); assertThat(BaseContract.uniqueIndex("field1", "field2").trim(), is("UNIQUE(field1,field2)")); } }
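/*
 * Hedged sketch following the BaseContractTest above: it shows the kind of CREATE TABLE
 * statement the UNIQUE(...) fragment from BaseContract.uniqueIndex() is meant to be
 * embedded in. The table and column names are made up, and the sketch assumes
 * uniqueIndex() is visible from this package, as it is to the test.
 */
package org.ccci.gto.android.common.db;

public class UniqueIndexUsageSketch {
    public static void main(String[] args) {
        String createTable = "CREATE TABLE contacts (id INTEGER PRIMARY KEY, email TEXT, phone TEXT, "
                + BaseContract.uniqueIndex("email", "phone") + ")";
        // e.g. CREATE TABLE contacts (id INTEGER PRIMARY KEY, email TEXT, phone TEXT, UNIQUE(email,phone))
        System.out.println(createTable);
    }
}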
package org.jboss.as.console.client.shared.subsys.jgroups; import java.util.HashMap; import java.util.Map; public enum Protocol { UNKNOWN(null), UDP("UDP"), TCP("TCP"), TCP_GOSSIP("TCP_GOSSIP"), TCP_PING("TCPPING"), AUTH("AUTH"), PING("PING"), MPING("MPING"), MERGE2("MERGE2"), FD_SOCK("FD_SOCK"), FD("FD"), VERIFY_SUSPECT("VERIFY_SUSPECT"), BARRIER("BARRIER"), NAKACK("pbcast.NAKACK"), UNICAST2("UNICAST2"), STABLE("pbcast.STABLE"), GMS("pbcast.GMS"), UFC("UFC"), MFC("MFC"), FRAG2("FRAG2"), STATE_TRANSFER("pbcast.STATE_TRANSFER"), FLUSH("pbcast.FLUSH"), ; private final String name; Protocol(final String name) { this.name = name; } /** * Get the local name of this protocol. * * @return the local name */ public String getLocalName() { return name; } private static final Map<String, Protocol> elements; static { final Map<String, Protocol> map = new HashMap<String, Protocol>(); for (Protocol element : values()) { final String name = element.getLocalName(); if (name != null) map.put(name, element); } elements = map; } public static Protocol forName(String localName) { final Protocol element = elements.get(localName); return element == null ? UNKNOWN : element; } }
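/*
 * Hedged usage sketch for the Protocol enum above: it demonstrates the forName() lookup,
 * including the UNKNOWN fallback for an unrecognised local name. The demo class and its
 * main method are illustrative and not part of the original console sources.
 */
package org.jboss.as.console.client.shared.subsys.jgroups;

public class ProtocolLookupSketch {
    public static void main(String[] args) {
        // Known local names map to their enum constant...
        Protocol nakack = Protocol.forName("pbcast.NAKACK");
        System.out.println(nakack + " -> " + nakack.getLocalName()); // NAKACK -> pbcast.NAKACK

        // ...while anything else falls back to UNKNOWN (whose local name is null).
        Protocol other = Protocol.forName("NOT_A_PROTOCOL");
        System.out.println(other); // UNKNOWN
    }
}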
package org.jsoup.helper; import org.jsoup.Connection; import org.jsoup.HttpStatusException; import org.jsoup.UncheckedIOException; import org.jsoup.UnsupportedMimeTypeException; import org.jsoup.internal.ConstrainableInputStream; import org.jsoup.internal.StringUtil; import org.jsoup.nodes.Document; import org.jsoup.parser.Parser; import org.jsoup.parser.TokenQueue; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLSocketFactory; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.Proxy; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import java.util.zip.GZIPInputStream; import java.util.zip.Inflater; import java.util.zip.InflaterInputStream; import static org.jsoup.Connection.Method.HEAD; import static org.jsoup.internal.Normalizer.lowerCase; /** * Implementation of {@link Connection}. * @see org.jsoup.Jsoup#connect(String) */ public class HttpConnection implements Connection { public static final String CONTENT_ENCODING = "Content-Encoding"; /** * Many users would get caught by not setting a user-agent and therefore getting different responses on their desktop * vs in jsoup, which would otherwise default to {@code Java}. So by default, use a desktop UA. */ public static final String DEFAULT_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36"; private static final String USER_AGENT = "User-Agent"; public static final String CONTENT_TYPE = "Content-Type"; public static final String MULTIPART_FORM_DATA = "multipart/form-data"; public static final String FORM_URL_ENCODED = "application/x-www-form-urlencoded"; private static final int HTTP_TEMP_REDIR = 307; // http/1.1 temporary redirect, not in Java's set. private static final String DefaultUploadType = "application/octet-stream"; private static final Charset UTF_8 = Charset.forName("UTF-8"); // Don't use StandardCharsets, not in Android API 10. private static final Charset ISO_8859_1 = Charset.forName("ISO-8859-1"); public static Connection connect(String url) { Connection con = new HttpConnection(); con.url(url); return con; } public static Connection connect(URL url) { Connection con = new HttpConnection(); con.url(url); return con; } public HttpConnection() { req = new Request(); res = new Response(); } /** * Encodes the input URL into a safe ASCII URL string * @param url unescaped URL * @return escaped URL */ private static String encodeUrl(String url) { try { URL u = new URL(url); return encodeUrl(u).toExternalForm(); } catch (Exception e) { return url; } } static URL encodeUrl(URL u) { try { // odd way to encode urls, but it works! 
String urlS = u.toExternalForm(); urlS = urlS.replace(" ", "%20"); final URI uri = new URI(urlS); return new URL(uri.toASCIIString()); } catch (URISyntaxException | MalformedURLException e) { // give up and return the original input return u; } } private static String encodeMimeName(String val) { if (val == null) return null; return val.replace("\"", "%22"); } private Connection.Request req; private Connection.Response res; public Connection url(URL url) { req.url(url); return this; } public Connection url(String url) { Validate.notEmpty(url, "Must supply a valid URL"); try { req.url(new URL(encodeUrl(url))); } catch (MalformedURLException e) { throw new IllegalArgumentException("Malformed URL: " + url, e); } return this; } public Connection proxy(Proxy proxy) { req.proxy(proxy); return this; } public Connection proxy(String host, int port) { req.proxy(host, port); return this; } public Connection userAgent(String userAgent) { Validate.notNull(userAgent, "User agent must not be null"); req.header(USER_AGENT, userAgent); return this; } public Connection timeout(int millis) { req.timeout(millis); return this; } public Connection maxBodySize(int bytes) { req.maxBodySize(bytes); return this; } public Connection followRedirects(boolean followRedirects) { req.followRedirects(followRedirects); return this; } public Connection referrer(String referrer) { Validate.notNull(referrer, "Referrer must not be null"); req.header("Referer", referrer); return this; } public Connection method(Method method) { req.method(method); return this; } public Connection ignoreHttpErrors(boolean ignoreHttpErrors) { req.ignoreHttpErrors(ignoreHttpErrors); return this; } public Connection ignoreContentType(boolean ignoreContentType) { req.ignoreContentType(ignoreContentType); return this; } public Connection data(String key, String value) { req.data(KeyVal.create(key, value)); return this; } public Connection sslSocketFactory(SSLSocketFactory sslSocketFactory) { req.sslSocketFactory(sslSocketFactory); return this; } public Connection data(String key, String filename, InputStream inputStream) { req.data(KeyVal.create(key, filename, inputStream)); return this; } @Override public Connection data(String key, String filename, InputStream inputStream, String contentType) { req.data(KeyVal.create(key, filename, inputStream).contentType(contentType)); return this; } public Connection data(Map<String, String> data) { Validate.notNull(data, "Data map must not be null"); for (Map.Entry<String, String> entry : data.entrySet()) { req.data(KeyVal.create(entry.getKey(), entry.getValue())); } return this; } public Connection data(String... 
keyvals) { Validate.notNull(keyvals, "Data key value pairs must not be null"); Validate.isTrue(keyvals.length %2 == 0, "Must supply an even number of key value pairs"); for (int i = 0; i < keyvals.length; i += 2) { String key = keyvals[i]; String value = keyvals[i+1]; Validate.notEmpty(key, "Data key must not be empty"); Validate.notNull(value, "Data value must not be null"); req.data(KeyVal.create(key, value)); } return this; } public Connection data(Collection<Connection.KeyVal> data) { Validate.notNull(data, "Data collection must not be null"); for (Connection.KeyVal entry: data) { req.data(entry); } return this; } public Connection.KeyVal data(String key) { Validate.notEmpty(key, "Data key must not be empty"); for (Connection.KeyVal keyVal : request().data()) { if (keyVal.key().equals(key)) return keyVal; } return null; } public Connection requestBody(String body) { req.requestBody(body); return this; } public Connection header(String name, String value) { req.header(name, value); return this; } public Connection headers(Map<String,String> headers) { Validate.notNull(headers, "Header map must not be null"); for (Map.Entry<String,String> entry : headers.entrySet()) { req.header(entry.getKey(),entry.getValue()); } return this; } public Connection cookie(String name, String value) { req.cookie(name, value); return this; } public Connection cookies(Map<String, String> cookies) { Validate.notNull(cookies, "Cookie map must not be null"); for (Map.Entry<String, String> entry : cookies.entrySet()) { req.cookie(entry.getKey(), entry.getValue()); } return this; } public Connection parser(Parser parser) { req.parser(parser); return this; } public Document get() throws IOException { req.method(Method.GET); execute(); return res.parse(); } public Document post() throws IOException { req.method(Method.POST); execute(); return res.parse(); } public Connection.Response execute() throws IOException { res = Response.execute(req); return res; } public Connection.Request request() { return req; } public Connection request(Connection.Request request) { req = request; return this; } public Connection.Response response() { return res; } public Connection response(Connection.Response response) { res = response; return this; } public Connection postDataCharset(String charset) { req.postDataCharset(charset); return this; } @SuppressWarnings({"unchecked"}) private static abstract class Base<T extends Connection.Base> implements Connection.Base<T> { URL url; Method method; Map<String, List<String>> headers; Map<String, String> cookies; private Base() { headers = new LinkedHashMap<>(); cookies = new LinkedHashMap<>(); } public URL url() { return url; } public T url(URL url) { Validate.notNull(url, "URL must not be null"); this.url = url; return (T) this; } public Method method() { return method; } public T method(Method method) { Validate.notNull(method, "Method must not be null"); this.method = method; return (T) this; } public String header(String name) { Validate.notNull(name, "Header name must not be null"); List<String> vals = getHeadersCaseInsensitive(name); if (vals.size() > 0) { // https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 return StringUtil.join(vals, ", "); } return null; } @Override public T addHeader(String name, String value) { Validate.notEmpty(name); value = value == null ? 
"" : value; List<String> values = headers(name); if (values.isEmpty()) { values = new ArrayList<>(); headers.put(name, values); } values.add(fixHeaderEncoding(value)); return (T) this; } @Override public List<String> headers(String name) { Validate.notEmpty(name); return getHeadersCaseInsensitive(name); } private static String fixHeaderEncoding(String val) { byte[] bytes = val.getBytes(ISO_8859_1); if (!looksLikeUtf8(bytes)) return val; return new String(bytes, UTF_8); } private static boolean looksLikeUtf8(byte[] input) { int i = 0; // BOM: if (input.length >= 3 && (input[0] & 0xFF) == 0xEF && (input[1] & 0xFF) == 0xBB & (input[2] & 0xFF) == 0xBF) { i = 3; } int end; for (int j = input.length; i < j; ++i) { int o = input[i]; if ((o & 0x80) == 0) { continue; // ASCII } // UTF-8 leading: if ((o & 0xE0) == 0xC0) { end = i + 1; } else if ((o & 0xF0) == 0xE0) { end = i + 2; } else if ((o & 0xF8) == 0xF0) { end = i + 3; } else { return false; } if (end >= input.length) return false; while (i < end) { i++; o = input[i]; if ((o & 0xC0) != 0x80) { return false; } } } return true; } public T header(String name, String value) { Validate.notEmpty(name, "Header name must not be empty"); removeHeader(name); // ensures we don't get an "accept-encoding" and a "Accept-Encoding" addHeader(name, value); return (T) this; } public boolean hasHeader(String name) { Validate.notEmpty(name, "Header name must not be empty"); return !getHeadersCaseInsensitive(name).isEmpty(); } /** * Test if the request has a header with this value (case insensitive). */ public boolean hasHeaderWithValue(String name, String value) { Validate.notEmpty(name); Validate.notEmpty(value); List<String> values = headers(name); for (String candidate : values) { if (value.equalsIgnoreCase(candidate)) return true; } return false; } public T removeHeader(String name) { Validate.notEmpty(name, "Header name must not be empty"); Map.Entry<String, List<String>> entry = scanHeaders(name); // remove is case insensitive too if (entry != null) headers.remove(entry.getKey()); // ensures correct case return (T) this; } public Map<String, String> headers() { LinkedHashMap<String, String> map = new LinkedHashMap<>(headers.size()); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { String header = entry.getKey(); List<String> values = entry.getValue(); if (values.size() > 0) map.put(header, values.get(0)); } return map; } @Override public Map<String, List<String>> multiHeaders() { return headers; } private List<String> getHeadersCaseInsensitive(String name) { Validate.notNull(name); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { if (name.equalsIgnoreCase(entry.getKey())) return entry.getValue(); } return Collections.emptyList(); } private Map.Entry<String, List<String>> scanHeaders(String name) { String lc = lowerCase(name); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { if (lowerCase(entry.getKey()).equals(lc)) return entry; } return null; } public String cookie(String name) { Validate.notEmpty(name, "Cookie name must not be empty"); return cookies.get(name); } public T cookie(String name, String value) { Validate.notEmpty(name, "Cookie name must not be empty"); Validate.notNull(value, "Cookie value must not be null"); cookies.put(name, value); return (T) this; } public boolean hasCookie(String name) { Validate.notEmpty(name, "Cookie name must not be empty"); return cookies.containsKey(name); } public T removeCookie(String name) { Validate.notEmpty(name, "Cookie name must not be empty"); 
cookies.remove(name); return (T) this; } public Map<String, String> cookies() { return cookies; } } public static class Request extends HttpConnection.Base<Connection.Request> implements Connection.Request { private Proxy proxy; // nullable private int timeoutMilliseconds; private int maxBodySizeBytes; private boolean followRedirects; private Collection<Connection.KeyVal> data; private String body = null; private boolean ignoreHttpErrors = false; private boolean ignoreContentType = false; private Parser parser; private boolean parserDefined = false; // called parser(...) vs initialized in ctor private String postDataCharset = DataUtil.defaultCharsetName; private SSLSocketFactory sslSocketFactory; Request() { timeoutMilliseconds = 30000; // 30 seconds maxBodySizeBytes = 1024 * 1024 * 2; // 2MB followRedirects = true; data = new ArrayList<>(); method = Method.GET; addHeader("Accept-Encoding", "gzip"); addHeader(USER_AGENT, DEFAULT_UA); parser = Parser.htmlParser(); } public Proxy proxy() { return proxy; } public Request proxy(Proxy proxy) { this.proxy = proxy; return this; } public Request proxy(String host, int port) { this.proxy = new Proxy(Proxy.Type.HTTP, InetSocketAddress.createUnresolved(host, port)); return this; } public int timeout() { return timeoutMilliseconds; } public Request timeout(int millis) { Validate.isTrue(millis >= 0, "Timeout milliseconds must be 0 (infinite) or greater"); timeoutMilliseconds = millis; return this; } public int maxBodySize() { return maxBodySizeBytes; } public Connection.Request maxBodySize(int bytes) { Validate.isTrue(bytes >= 0, "maxSize must be 0 (unlimited) or larger"); maxBodySizeBytes = bytes; return this; } public boolean followRedirects() { return followRedirects; } public Connection.Request followRedirects(boolean followRedirects) { this.followRedirects = followRedirects; return this; } public boolean ignoreHttpErrors() { return ignoreHttpErrors; } public SSLSocketFactory sslSocketFactory() { return sslSocketFactory; } public void sslSocketFactory(SSLSocketFactory sslSocketFactory) { this.sslSocketFactory = sslSocketFactory; } public Connection.Request ignoreHttpErrors(boolean ignoreHttpErrors) { this.ignoreHttpErrors = ignoreHttpErrors; return this; } public boolean ignoreContentType() { return ignoreContentType; } public Connection.Request ignoreContentType(boolean ignoreContentType) { this.ignoreContentType = ignoreContentType; return this; } public Request data(Connection.KeyVal keyval) { Validate.notNull(keyval, "Key val must not be null"); data.add(keyval); return this; } public Collection<Connection.KeyVal> data() { return data; } public Connection.Request requestBody(String body) { this.body = body; return this; } public String requestBody() { return body; } public Request parser(Parser parser) { this.parser = parser; parserDefined = true; return this; } public Parser parser() { return parser; } public Connection.Request postDataCharset(String charset) { Validate.notNull(charset, "Charset must not be null"); if (!Charset.isSupported(charset)) throw new IllegalCharsetNameException(charset); this.postDataCharset = charset; return this; } public String postDataCharset() { return postDataCharset; } } public static class Response extends HttpConnection.Base<Connection.Response> implements Connection.Response { private static final int MAX_REDIRECTS = 20; private static final String LOCATION = "Location"; private int statusCode; private String statusMessage; private ByteBuffer byteData; private InputStream bodyStream; private 
HttpURLConnection conn; private String charset; private String contentType; private boolean executed = false; private boolean inputStreamRead = false; private int numRedirects = 0; private Connection.Request req; /* * Matches XML content types (like text/xml, application/xhtml+xml;charset=UTF8, etc) */ private static final Pattern xmlContentTypeRxp = Pattern.compile("(application|text)/\\w*\\+?xml.*"); Response() { super(); } private Response(Response previousResponse) throws IOException { super(); if (previousResponse != null) { numRedirects = previousResponse.numRedirects + 1; if (numRedirects >= MAX_REDIRECTS) throw new IOException(String.format("Too many redirects occurred trying to load URL %s", previousResponse.url())); } } static Response execute(Connection.Request req) throws IOException { return execute(req, null); } static Response execute(Connection.Request req, Response previousResponse) throws IOException { Validate.notNull(req, "Request must not be null"); Validate.notNull(req.url(), "URL must be specified to connect"); String protocol = req.url().getProtocol(); if (!protocol.equals("http") && !protocol.equals("https")) throw new MalformedURLException("Only http & https protocols supported"); final boolean methodHasBody = req.method().hasBody(); final boolean hasRequestBody = req.requestBody() != null; if (!methodHasBody) Validate.isFalse(hasRequestBody, "Cannot set a request body for HTTP method " + req.method()); // set up the request for execution String mimeBoundary = null; if (req.data().size() > 0 && (!methodHasBody || hasRequestBody)) serialiseRequestUrl(req); else if (methodHasBody) mimeBoundary = setOutputContentType(req); long startTime = System.nanoTime(); HttpURLConnection conn = createConnection(req); Response res = null; try { conn.connect(); if (conn.getDoOutput()) writePost(req, conn.getOutputStream(), mimeBoundary); int status = conn.getResponseCode(); res = new Response(previousResponse); res.setupFromConnection(conn, previousResponse); res.req = req; // redirect if there's a location header (from 3xx, or 201 etc) if (res.hasHeader(LOCATION) && req.followRedirects()) { if (status != HTTP_TEMP_REDIR) { req.method(Method.GET); // always redirect with a get. any data param from original req are dropped. req.data().clear(); req.requestBody(null); req.removeHeader(CONTENT_TYPE); } String location = res.header(LOCATION); if (location.startsWith("http:/") && location.charAt(6) != '/') // fix broken Location: http:/temp/AAG_New/en/index.php location = location.substring(6); URL redir = StringUtil.resolve(req.url(), location); req.url(encodeUrl(redir)); for (Map.Entry<String, String> cookie : res.cookies.entrySet()) { // add response cookies to request (for e.g. login posts) req.cookie(cookie.getKey(), cookie.getValue()); } return execute(req, res); } if ((status < 200 || status >= 400) && !req.ignoreHttpErrors()) throw new HttpStatusException("HTTP error fetching URL", status, req.url().toString()); // check that we can handle the returned content type; if not, abort before fetching it String contentType = res.contentType(); if (contentType != null && !req.ignoreContentType() && !contentType.startsWith("text/") && !xmlContentTypeRxp.matcher(contentType).matches() ) /** * Call on completion of stream read, to close the body (or error) stream. The connection.disconnect allows * keep-alives to work (as the underlying connection is actually held open, despite the name). 
*/ private void safeClose() { if (bodyStream != null) { try { bodyStream.close(); } catch (IOException e) { // no-op } finally { bodyStream = null; } } if (conn != null) { conn.disconnect(); conn = null; } } // set up url, method, header, cookies private void setupFromConnection(HttpURLConnection conn, HttpConnection.Response previousResponse) throws IOException { this.conn = conn; method = Method.valueOf(conn.getRequestMethod()); url = conn.getURL(); statusCode = conn.getResponseCode(); statusMessage = conn.getResponseMessage(); contentType = conn.getContentType(); Map<String, List<String>> resHeaders = createHeaderMap(conn); processResponseHeaders(resHeaders); // if from a redirect, map previous response cookies into this response if (previousResponse != null) { for (Map.Entry<String, String> prevCookie : previousResponse.cookies().entrySet()) { if (!hasCookie(prevCookie.getKey())) cookie(prevCookie.getKey(), prevCookie.getValue()); } previousResponse.safeClose(); } } private static LinkedHashMap<String, List<String>> createHeaderMap(HttpURLConnection conn) { // the default sun impl of conn.getHeaderFields() returns header values out of order final LinkedHashMap<String, List<String>> headers = new LinkedHashMap<>(); int i = 0; while (true) { final String key = conn.getHeaderFieldKey(i); final String val = conn.getHeaderField(i); if (key == null && val == null) break; i++; if (key == null || val == null) continue; // skip http1.1 line if (headers.containsKey(key)) headers.get(key).add(val); else { final ArrayList<String> vals = new ArrayList<>(); vals.add(val); headers.put(key, vals); } } return headers; } void processResponseHeaders(Map<String, List<String>> resHeaders) { for (Map.Entry<String, List<String>> entry : resHeaders.entrySet()) { String name = entry.getKey(); if (name == null) continue; // http/1.1 line List<String> values = entry.getValue(); if (name.equalsIgnoreCase("Set-Cookie")) { for (String value : values) { if (value == null) continue; TokenQueue cd = new TokenQueue(value); String cookieName = cd.chompTo("=").trim(); String cookieVal = cd.consumeTo(";").trim(); // ignores path, date, domain, validateTLSCertificates et al. req'd? // name not blank, value not null if (cookieName.length() > 0) cookie(cookieName, cookieVal); } } for (String value : values) { addHeader(name, value); } } } private static String setOutputContentType(final Connection.Request req) { String bound = null; if (req.hasHeader(CONTENT_TYPE)) { // no-op; don't add content type as already set (e.g. for requestBody()) // todo - if content type already set, we could add charset // if user has set content type to multipart/form-data, auto add boundary. 
if(req.header(CONTENT_TYPE).contains(MULTIPART_FORM_DATA) && !req.header(CONTENT_TYPE).contains("boundary")) { bound = DataUtil.mimeBoundary(); req.header(CONTENT_TYPE, MULTIPART_FORM_DATA + "; boundary=" + bound); } } else if (needsMultipart(req)) { bound = DataUtil.mimeBoundary(); req.header(CONTENT_TYPE, MULTIPART_FORM_DATA + "; boundary=" + bound); } else { req.header(CONTENT_TYPE, FORM_URL_ENCODED + "; charset=" + req.postDataCharset()); } return bound; } private static void writePost(final Connection.Request req, final OutputStream outputStream, final String bound) throws IOException { final Collection<Connection.KeyVal> data = req.data(); final BufferedWriter w = new BufferedWriter(new OutputStreamWriter(outputStream, req.postDataCharset())); if (bound != null) { // boundary will be set if we're in multipart mode for (Connection.KeyVal keyVal : data) { w.write("--"); w.write(bound); w.write("\r\n"); w.write("Content-Disposition: form-data; name=\""); w.write(encodeMimeName(keyVal.key())); // encodes " to %22 w.write("\""); if (keyVal.hasInputStream()) { w.write("; filename=\""); w.write(encodeMimeName(keyVal.value())); w.write("\"\r\nContent-Type: "); w.write(keyVal.contentType() != null ? keyVal.contentType() : DefaultUploadType); w.write("\r\n\r\n"); w.flush(); // flush DataUtil.crossStreams(keyVal.inputStream(), outputStream); outputStream.flush(); } else { w.write("\r\n\r\n"); w.write(keyVal.value()); } w.write("\r\n"); } w.write("--"); w.write(bound); w.write("--"); } else if (req.requestBody() != null) { // data will be in query string, we're sending a plaintext body w.write(req.requestBody()); } else { // regular form data (application/x-www-form-urlencoded) boolean first = true; for (Connection.KeyVal keyVal : data) { if (!first) w.append('&'); else first = false; w.write(URLEncoder.encode(keyVal.key(), req.postDataCharset())); w.write('='); w.write(URLEncoder.encode(keyVal.value(), req.postDataCharset())); } } w.close(); } private static String getRequestCookieString(Connection.Request req) { StringBuilder sb = StringUtil.borrowBuilder(); boolean first = true; for (Map.Entry<String, String> cookie : req.cookies().entrySet()) { if (!first) sb.append("; "); else first = false; sb.append(cookie.getKey()).append('=').append(cookie.getValue()); // todo: spec says only ascii, no escaping / encoding defined. validate on set? or escape somehow here? } return StringUtil.releaseBuilder(sb); } // for get url reqs, serialise the data map into the url private static void serialiseRequestUrl(Connection.Request req) throws IOException { URL in = req.url(); StringBuilder url = StringUtil.borrowBuilder(); boolean first = true; // reconstitute the query, ready for appends url .append(in.getProtocol()) .append("://") .append(in.getAuthority()) // includes host, port .append(in.getPath()) .append("?"); if (in.getQuery() != null) { url.append(in.getQuery()); first = false; } for (Connection.KeyVal keyVal : req.data()) { Validate.isFalse(keyVal.hasInputStream(), "InputStream data not supported in URL query string."); if (!first) url.append('&'); else first = false; url .append(URLEncoder.encode(keyVal.key(), DataUtil.defaultCharsetName)) .append('=') .append(URLEncoder.encode(keyVal.value(), DataUtil.defaultCharsetName)); } req.url(new URL(StringUtil.releaseBuilder(url))); req.data().clear(); // moved into url as get params } } private static boolean needsMultipart(Connection.Request req) { // multipart mode, for files.
add the header if we see something with an inputstream, and return a non-null boundary for (Connection.KeyVal keyVal : req.data()) { if (keyVal.hasInputStream()) return true; } return false; } public static class KeyVal implements Connection.KeyVal { private String key; private String value; private InputStream stream; private String contentType; public static KeyVal create(String key, String value) { return new KeyVal().key(key).value(value); } public static KeyVal create(String key, String filename, InputStream stream) { return new KeyVal().key(key).value(filename).inputStream(stream); } private KeyVal() {} public KeyVal key(String key) { Validate.notEmpty(key, "Data key must not be empty"); this.key = key; return this; } public String key() { return key; } public KeyVal value(String value) { Validate.notNull(value, "Data value must not be null"); this.value = value; return this; } public String value() { return value; } public KeyVal inputStream(InputStream inputStream) { Validate.notNull(inputStream, "Data input stream must not be null"); this.stream = inputStream; return this; } public InputStream inputStream() { return stream; } public boolean hasInputStream() { return stream != null; } @Override public Connection.KeyVal contentType(String contentType) { Validate.notEmpty(contentType); this.contentType = contentType; return this; } @Override public String contentType() { return contentType; } @Override public String toString() { return key + "=" + value; } } }
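/*
 * Hedged usage sketch for the HttpConnection implementation above: it strings together the
 * fluent request-building calls shown in the class (userAgent, timeout, data, get). The URL
 * is a placeholder, and Document#title() is assumed from the wider jsoup API rather than
 * from this excerpt.
 */
package org.jsoup.helper;

import java.io.IOException;

import org.jsoup.Connection;
import org.jsoup.nodes.Document;

public class HttpConnectionUsageSketch {
    public static void main(String[] args) throws IOException {
        Connection con = HttpConnection.connect("https://example.com/search") // placeholder URL
                .userAgent(HttpConnection.DEFAULT_UA) // explicit UA (also the default)
                .timeout(10000)                       // 10 second timeout
                .data("q", "jsoup");                  // serialised into the query string for GET

        Document doc = con.get(); // executes the request and parses the response body
        System.out.println(doc.title());
    }
}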
package org.jtrfp.trcl.mem; import java.lang.reflect.Field; import java.nio.ByteBuffer; import org.jtrfp.trcl.core.IndexPool; public abstract class MemoryWindow { private IByteBuffer buffer; private final int objectSizeInBytes; private final IndexPool indexPool = new IndexPool(); //TODO: GrowthBehavior to resize the buffer. protected MemoryWindow(int objectSizeInBytes){ this.objectSizeInBytes=objectSizeInBytes; }//end constructor protected final void init(){ final Class thisClass = getClass(); for(Field f:getClass().getFields()){ if(Variable.class.isAssignableFrom(f.getType())){ try{final Variable<?,?> var = (Variable<?,?>)f.get(this); var.initialize(this);} catch(IllegalAccessException e){e.printStackTrace();} }//end if(Variable) }//end for(fields) }//end init() public final int create(){ return indexPool.pop(); } public final int getNumObjects(){ return indexPool.getMaxCapacity(); } public static abstract class Variable<TYPE, THIS_CLASS extends Variable>{ private MemoryWindow parent; private int byteOffset; void initialize(MemoryWindow parent){ this.parent=parent; } public abstract THIS_CLASS set(int objectIndex, TYPE value); public abstract TYPE get(int objectIndex); /** * @return the parent */ protected final MemoryWindow getParent() { return parent; } public final THIS_CLASS byteOffset(int off){ this.byteOffset=off; return (THIS_CLASS)this; } protected final int byteOffset(){ return byteOffset;} }//end Property public static final class IntVariable extends Variable<Integer,IntVariable>{ @Override public IntVariable set(int objectIndex, Integer value) { getParent().getBuffer().putInt(byteOffset()+objectIndex*getParent().getObjectSizeInBytes(), value); return this; } @Override public Integer get(int objectIndex) { return getParent().getBuffer().getInt(byteOffset()+objectIndex*getParent().getObjectSizeInBytes()); } }//end IntVariable public static final class ByteVariable extends Variable<Byte, ByteVariable>{ @Override public ByteVariable set(int objectIndex, Byte value) { getParent().getBuffer().put(byteOffset()+objectIndex*getParent().getObjectSizeInBytes(), value); return this; } @Override public Byte get(int objectIndex) { return getParent().getBuffer().get(byteOffset()+objectIndex*getParent().getObjectSizeInBytes()); } }//end ByteVariable public static final class ByteArrayVariable extends Variable<ByteBuffer,ByteArrayVariable>{ private int arrayLen=0;//Keep for automatic size calculation public ByteArrayVariable(int arrayLen){this.arrayLen=arrayLen;} @Override public ByteArrayVariable set(int objectIndex, ByteBuffer value) { getParent().getBuffer().put(byteOffset()+objectIndex*getParent().getObjectSizeInBytes(), value); return this; } public ByteArrayVariable set(int objectIndex, int offsetInBytes, ByteBuffer value) { getParent().getBuffer().put(offsetInBytes+byteOffset()+objectIndex*getParent().getObjectSizeInBytes(), value); return this; } @Override public ByteBuffer get(int objectIndex) { return null;//unimplemented } }//end Double2FloatArrayVariable public static final class Double2FloatArrayVariable extends Variable<double [],Double2FloatArrayVariable>{ private int arrayLen=0; public Double2FloatArrayVariable(int arrayLen){this.arrayLen=arrayLen;} @Override public Double2FloatArrayVariable set(int objectIndex, double[] value) { for(int i=0; i<arrayLen; i++){ getParent().getBuffer().putFloat(i*4+byteOffset()+objectIndex*getParent().getObjectSizeInBytes(),(float)value[i]); } return this; } @Override public double[] get(int objectIndex) { final double [] result = new 
double[arrayLen]; for(int i=0; i<arrayLen; i++){ result[i]=getParent().getBuffer().getFloat(i*4+byteOffset()+objectIndex*getParent().getObjectSizeInBytes()); } return result; } }//end Double2FloatArrayVariable /** * @return the buffer */ public final IByteBuffer getBuffer() { return buffer; } /** * @param buffer the buffer to set */ public final void setBuffer(IByteBuffer buffer) { this.buffer = buffer; } public final int getObjectSizeInBytes(){return objectSizeInBytes;} public final int getPhysicalAddressInBytes(int objectIndex){return buffer.logical2PhysicalAddressBytes(objectIndex*objectSizeInBytes);} public final int numObjectsPerPage(){return PagedByteBuffer.PAGE_SIZE_BYTES/getObjectSizeInBytes();} public final int numPages(){return getNumObjects()/numObjectsPerPage();} public final int logicalPage2PhysicalPage(int logicalPage){ return buffer. logical2PhysicalAddressBytes(logicalPage*PagedByteBuffer.PAGE_SIZE_BYTES)/PagedByteBuffer.PAGE_SIZE_BYTES;} }//end ObjectWindow
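/*
 * Hedged sketch of how a concrete MemoryWindow subclass above is expected to look: each
 * Variable field gets an explicit byte offset, the object size is passed to the super
 * constructor, and init() wires the fields back to the window via reflection. The field
 * layout and class name are invented for illustration; a real window also needs
 * setBuffer(...) to be called with an IByteBuffer implementation before set()/get().
 */
package org.jtrfp.trcl.mem;

public final class DemoObjectWindow extends MemoryWindow {
    // Layout: one int ID at byte 0, then three floats (stored from doubles) at byte 4.
    public final IntVariable id = new IntVariable().byteOffset(0);
    public final Double2FloatArrayVariable position = new Double2FloatArrayVariable(3).byteOffset(4);

    public DemoObjectWindow() {
        super(4 + 3 * 4); // object size in bytes: int + 3 floats
        init();           // binds the public Variable fields to this window
    }

    // Typical use (once setBuffer(...) has been given a backing IByteBuffer):
    //   DemoObjectWindow w = new DemoObjectWindow();
    //   w.setBuffer(buffer);
    //   int idx = w.create();
    //   w.id.set(idx, 42);
    //   w.position.set(idx, new double[]{1.0, 2.0, 3.0});
}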
package org.pentaho.di.core.row; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.net.SocketTimeoutException; import java.nio.charset.Charset; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.text.NumberFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; import org.pentaho.di.compatibility.Value; import org.pentaho.di.core.Const; import org.pentaho.di.core.Messages; import org.pentaho.di.core.exception.KettleEOFException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.xml.XMLHandler; import org.w3c.dom.Node; public class ValueMeta implements ValueMetaInterface { public static final String DEFAULT_DATE_FORMAT_MASK = "yyyy/MM/dd HH:mm:ss.SSS"; public static final String XML_META_TAG = "value-meta"; public static final String XML_DATA_TAG = "value-data"; private String name; private int length; private int precision; private int type; private int trimType; private int storageType; private String origin; private String comments; private Object[] index; private String conversionMask; private String stringEncoding; private String decimalSymbol; private String groupingSymbol; private String currencySymbol; private boolean caseInsensitive; private boolean sortedDescending; private boolean outputPaddingEnabled; private boolean largeTextField; private Locale dateFormatLocale; private boolean dateFormatLenient; private SimpleDateFormat dateFormat; private boolean dateFormatChanged; private DecimalFormat decimalFormat; private boolean decimalFormatChanged; private ValueMetaInterface storageMetadata; private boolean identicalFormat; private ValueMetaInterface conversionMetadata; boolean singleByteEncoding; private long numberOfBinaryStringConversions; /** * The trim type codes */ public final static String trimTypeCode[] = { "none", "left", "right", "both" }; /** * The trim description */ public final static String trimTypeDesc[] = { Messages.getString("ValueMeta.TrimType.None"), Messages.getString("ValueMeta.TrimType.Left"), Messages.getString("ValueMeta.TrimType.Right"), Messages.getString("ValueMeta.TrimType.Both") }; public ValueMeta() { this(null, ValueMetaInterface.TYPE_NONE, -1, -1); } public ValueMeta(String name) { this(name, ValueMetaInterface.TYPE_NONE, -1, -1); } public ValueMeta(String name, int type) { this(name, type, -1, -1); } public ValueMeta(String name, int type, int storageType) { this(name, type, -1, -1); this.storageType = storageType; setDefaultConversionMask(); } public ValueMeta(String name, int type, int length, int precision) { this.name = name; this.type = type; this.length = length; this.precision = precision; this.storageType=STORAGE_TYPE_NORMAL; this.sortedDescending=false; this.outputPaddingEnabled=false; this.decimalSymbol = ""+Const.DEFAULT_DECIMAL_SEPARATOR; this.groupingSymbol = ""+Const.DEFAULT_GROUPING_SEPARATOR; this.dateFormatLocale = Locale.getDefault(); this.identicalFormat = true; determineSingleByteEncoding(); setDefaultConversionMask(); } public static final String[] SINGLE_BYTE_ENCODINGS = new String[] { "ISO8859_1", "Cp1252", "ASCII", "Cp037", "Cp273", "Cp277", "Cp278", "Cp280", "Cp284", "Cp285", "Cp297", "Cp420","Cp424", "Cp437", "Cp500", "Cp737", "Cp775", "Cp850", "Cp852", "Cp855", "Cp856", 
"Cp857", "Cp858", "Cp860", "Cp861", "Cp862", "Cp863", "Cp865", "Cp866", "Cp869", "Cp870", "Cp871", "Cp875", "Cp918", "Cp921", "Cp922", "Cp1140", "Cp1141", "Cp1142", "Cp1143", "Cp1144", "Cp1145", "Cp1146", "Cp1147", "Cp1148", "Cp1149", "Cp1250", "Cp1251", "Cp1253", "Cp1254", "Cp1255", "Cp1257", "ISO8859_2", "ISO8859_3", "ISO8859_5", "ISO8859_5", "ISO8859_6", "ISO8859_7", "ISO8859_8", "ISO8859_9", "ISO8859_13", "ISO8859_15", "ISO8859_15_FDIS", "MacCentralEurope", "MacCroatian", "MacCyrillic", "MacDingbat", "MacGreek", "MacHebrew", "MacIceland", "MacRoman", "MacRomania", "MacSymbol", "MacTurkish", "MacUkraine", }; private void setDefaultConversionMask() { // Set some sensible default mask on the numbers switch(type) { case TYPE_INTEGER: setConversionMask("#;-#"); break; case TYPE_NUMBER: setConversionMask("#.#;-#.#"); break; default: break; } } private void determineSingleByteEncoding() { singleByteEncoding=false; Charset cs; if (Const.isEmpty(stringEncoding)) { cs = Charset.defaultCharset(); } else { cs = Charset.forName(stringEncoding); } // See if the default character set for input is single byte encoded. for (String charSetEncoding : SINGLE_BYTE_ENCODINGS) { if (cs.toString().equalsIgnoreCase(charSetEncoding)) singleByteEncoding=true; } } public ValueMeta clone() { try { ValueMeta valueMeta = (ValueMeta) super.clone(); valueMeta.dateFormat = null; valueMeta.decimalFormat = null; if (dateFormatLocale!=null) valueMeta.dateFormatLocale = (Locale) dateFormatLocale.clone(); if (storageMetadata!=null) valueMeta.storageMetadata = storageMetadata.clone(); if (conversionMetadata!=null) valueMeta.conversionMetadata = conversionMetadata.clone(); valueMeta.compareStorageAndActualFormat(); return valueMeta; } catch (CloneNotSupportedException e) { return null; } } /** * @return the comments */ public String getComments() { return comments; } /** * @param comments the comments to set */ public void setComments(String comments) { this.comments = comments; } /** * @return the index */ public Object[] getIndex() { return index; } /** * @param index the index to set */ public void setIndex(Object[] index) { this.index = index; } /** * @return the length */ public int getLength() { return length; } /** * @param length the length to set */ public void setLength(int length) { this.length = length; } /** * @param length the length to set */ public void setLength(int length, int precision) { this.length = length; this.precision = precision; } /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public void setName(String name) { this.name = name; } /** * @return the origin */ public String getOrigin() { return origin; } /** * @param origin the origin to set */ public void setOrigin(String origin) { this.origin = origin; } /** * @return the precision */ public int getPrecision() { // For backward compatibility we need to tweak a bit... 
if (isInteger() || isBinary()) return 0; if (isString() || isBoolean()) return -1; return precision; } /** * @param precision the precision to set */ public void setPrecision(int precision) { this.precision = precision; } /** * @return the storageType */ public int getStorageType() { return storageType; } /** * @param storageType the storageType to set */ public void setStorageType(int storageType) { this.storageType = storageType; } public boolean isStorageNormal() { return storageType == STORAGE_TYPE_NORMAL; } public boolean isStorageIndexed() { return storageType == STORAGE_TYPE_INDEXED; } public boolean isStorageBinaryString() { return storageType == STORAGE_TYPE_BINARY_STRING; } /** * @return the type */ public int getType() { return type; } /** * @param type the type to set */ public void setType(int type) { this.type = type; } /** * @return the conversionMask */ public String getConversionMask() { return conversionMask; } /** * @param conversionMask the conversionMask to set */ public void setConversionMask(String conversionMask) { this.conversionMask = conversionMask; dateFormatChanged = true; decimalFormatChanged = true; compareStorageAndActualFormat(); } /** * @return the encoding */ public String getStringEncoding() { return stringEncoding; } /** * @param encoding the encoding to set */ public void setStringEncoding(String encoding) { this.stringEncoding = encoding; determineSingleByteEncoding(); compareStorageAndActualFormat(); } /** * @return the decimalSymbol */ public String getDecimalSymbol() { return decimalSymbol; } /** * @param decimalSymbol the decimalSymbol to set */ public void setDecimalSymbol(String decimalSymbol) { this.decimalSymbol = decimalSymbol; decimalFormatChanged = true; compareStorageAndActualFormat(); } /** * @return the groupingSymbol */ public String getGroupingSymbol() { return groupingSymbol; } /** * @param groupingSymbol the groupingSymbol to set */ public void setGroupingSymbol(String groupingSymbol) { this.groupingSymbol = groupingSymbol; decimalFormatChanged = true; compareStorageAndActualFormat(); } /** * @return the currencySymbol */ public String getCurrencySymbol() { return currencySymbol; } /** * @param currencySymbol the currencySymbol to set */ public void setCurrencySymbol(String currencySymbol) { this.currencySymbol = currencySymbol; decimalFormatChanged = true; } /** * @return the caseInsensitive */ public boolean isCaseInsensitive() { return caseInsensitive; } /** * @param caseInsensitive the caseInsensitive to set */ public void setCaseInsensitive(boolean caseInsensitive) { this.caseInsensitive = caseInsensitive; } /** * @return the sortedDescending */ public boolean isSortedDescending() { return sortedDescending; } /** * @param sortedDescending the sortedDescending to set */ public void setSortedDescending(boolean sortedDescending) { this.sortedDescending = sortedDescending; } /** * @return true if output padding is enabled (padding to specified length) */ public boolean isOutputPaddingEnabled() { return outputPaddingEnabled; } /** * @param outputPaddingEnabled Set to true if output padding is to be enabled (padding to specified length) */ public void setOutputPaddingEnabled(boolean outputPaddingEnabled) { this.outputPaddingEnabled = outputPaddingEnabled; } /** * @return true if this is a large text field (CLOB, TEXT) with arbitrary length. */ public boolean isLargeTextField() { return largeTextField; } /** * @param largeTextField Set to true if this is to be a large text field (CLOB, TEXT) with arbitrary length. 
*/ public void setLargeTextField(boolean largeTextField) { this.largeTextField = largeTextField; } /** * @return the dateFormatLenient */ public boolean isDateFormatLenient() { return dateFormatLenient; } /** * @param dateFormatLenient the dateFormatLenient to set */ public void setDateFormatLenient(boolean dateFormatLenient) { this.dateFormatLenient = dateFormatLenient; dateFormatChanged=true; } /** * @return the dateFormatLocale */ public Locale getDateFormatLocale() { return dateFormatLocale; } /** * @param dateFormatLocale the dateFormatLocale to set */ public void setDateFormatLocale(Locale dateFormatLocale) { this.dateFormatLocale = dateFormatLocale; dateFormatChanged=true; } // DATE + STRING private synchronized String convertDateToString(Date date) { if (date==null) return null; return getDateFormat().format(date); } private static SimpleDateFormat compatibleDateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS"); private synchronized String convertDateToCompatibleString(Date date) { if (date==null) return null; return compatibleDateFormat.format(date); } private synchronized Date convertStringToDate(String string) throws KettleValueException { if (Const.isEmpty(string)) return null; string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion try { return getDateFormat().parse(string); } catch (ParseException e) { String dateFormat = (getDateFormat() != null) ? getDateFormat().toPattern() : "null"; throw new KettleValueException(toString()+" : couldn't convert string ["+string+"] to a date using format ["+dateFormat+"]", e); } } // DATE + NUMBER private Double convertDateToNumber(Date date) { return new Double( date.getTime() ); } private Date convertNumberToDate(Double number) { return new Date( number.longValue() ); } // DATE + INTEGER private Long convertDateToInteger(Date date) { return new Long( date.getTime() ); } private Date convertIntegerToDate(Long number) { return new Date( number.longValue() ); } // DATE + BIGNUMBER private BigDecimal convertDateToBigNumber(Date date) { return new BigDecimal( date.getTime() ); } private Date convertBigNumberToDate(BigDecimal number) { return new Date( number.longValue() ); } private synchronized String convertNumberToString(Double number) throws KettleValueException { if (number==null) { if (!outputPaddingEnabled || length<1) { return null; } else { // Return strings padded to the specified length... // This is done for backward compatibility with 2.5.x // We just optimized this a bit... 
String[] emptyPaddedStrings = Const.getEmptyPaddedStrings(); if (length<emptyPaddedStrings.length) { return emptyPaddedStrings[length]; } else { return Const.rightPad("", length); } } } try { return getDecimalFormat().format(number); } catch(Exception e) { throw new KettleValueException(toString()+" : couldn't convert Number to String ", e); } } private synchronized String convertNumberToCompatibleString(Double number) throws KettleValueException { if (number==null) return null; return Double.toString(number); } private synchronized Double convertStringToNumber(String string) throws KettleValueException { if (Const.isEmpty(string)) return null; string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion try { return new Double( getDecimalFormat().parse(string).doubleValue() ); } catch(Exception e) { throw new KettleValueException(toString()+" : couldn't convert String to number ", e); } } public synchronized SimpleDateFormat getDateFormat() { // If we have a Date that is represented as a String // In that case we can set the format of the original Date on the String value metadata in the form of a conversion metadata object. // That way, we can always convert from Date to String and back without a problem, no matter how complex the format was. // As such, we should return the date SimpleDateFormat of the conversion metadata. if (conversionMetadata!=null ) { return conversionMetadata.getDateFormat(); } if (dateFormat==null || dateFormatChanged) { // This may not become static as the class is not thread-safe! dateFormat = new SimpleDateFormat(); String mask; if (Const.isEmpty(conversionMask)) { mask = DEFAULT_DATE_FORMAT_MASK; } else { mask = conversionMask; } if (dateFormatLocale==null || dateFormatLocale.equals(Locale.getDefault())) { dateFormat = new SimpleDateFormat(mask); } else { dateFormat = new SimpleDateFormat(mask, dateFormatLocale); } dateFormatChanged=false; } return dateFormat; } public synchronized DecimalFormat getDecimalFormat() { // If we have an Integer that is represented as a String // In that case we can set the format of the original Integer on the String value metadata in the form of a conversion metadata object. // That way, we can always convert from Integer to String and back without a problem, no matter how complex the format was. // As such, we should return the decimal format of the conversion metadata. if (conversionMetadata!=null ) { return conversionMetadata.getDecimalFormat(); } // Calculate the decimal format as few times as possible. // That is because creating or changing a DecimalFormat object is very CPU hungry. if (decimalFormat==null || decimalFormatChanged) { decimalFormat = (DecimalFormat)NumberFormat.getInstance(); DecimalFormatSymbols decimalFormatSymbols = decimalFormat.getDecimalFormatSymbols(); if (!Const.isEmpty(currencySymbol)) decimalFormatSymbols.setCurrencySymbol( currencySymbol ); if (!Const.isEmpty(groupingSymbol)) decimalFormatSymbols.setGroupingSeparator( groupingSymbol.charAt(0) ); if (!Const.isEmpty(decimalSymbol)) decimalFormatSymbols.setDecimalSeparator( decimalSymbol.charAt(0) ); decimalFormat.setDecimalFormatSymbols(decimalFormatSymbols); // Apply the conversion mask if we have one... if (!Const.isEmpty(conversionMask)) { decimalFormat.applyPattern(conversionMask); } else { switch(type) { case TYPE_INTEGER: { if (length<1) { decimalFormat.applyPattern(" } else { StringBuffer integerPattern=new StringBuffer(); // First the format for positive integers... 
integerPattern.append(" "); for (int i=0;i<getLength();i++) integerPattern.append('0'); // all zeroes. integerPattern.append(";"); // Then the format for the negative numbers... integerPattern.append("-"); for (int i=0;i<getLength();i++) integerPattern.append('0'); // all zeroes. decimalFormat.applyPattern(integerPattern.toString()); } } break; case TYPE_NUMBER: { if (length<1) { decimalFormat.applyPattern(" } else { StringBuffer numberPattern=new StringBuffer(); // First do the format for positive numbers... numberPattern.append(' '); // to compensate for minus sign. if (precision<0) // Default: two decimals { for (int i=0;i<length;i++) numberPattern.append('0'); numberPattern.append(".00"); // for the .00 } else // Floating point format 00001234,56 --> (12,2) { for (int i=0;i<=length;i++) numberPattern.append('0'); // all zeroes. int pos = length-precision+1; if (pos>=0 && pos <numberPattern.length()) { numberPattern.setCharAt(length-precision+1, '.'); // one 'comma' } } // Now do the format for negative numbers... StringBuffer negativePattern = new StringBuffer(numberPattern); negativePattern.setCharAt(0, '-'); numberPattern.append(";"); numberPattern.append(negativePattern); // Apply the pattern... decimalFormat.applyPattern(numberPattern.toString()); } } } } decimalFormatChanged=false; } return decimalFormat; } private synchronized String convertIntegerToString(Long integer) throws KettleValueException { if (integer==null) { if (!outputPaddingEnabled || length<1) { return null; } else { // Return strings padded to the specified length... // This is done for backward compatibility with 2.5.x // We just optimized this a bit... String[] emptyPaddedStrings = Const.getEmptyPaddedStrings(); if (length<emptyPaddedStrings.length) { return emptyPaddedStrings[length]; } else { return Const.rightPad("", length); } } } try { return getDecimalFormat().format(integer); } catch(Exception e) { throw new KettleValueException(toString()+" : couldn't convert Long to String ", e); } } private synchronized String convertIntegerToCompatibleString(Long integer) throws KettleValueException { if (integer==null) return null; return Long.toString(integer); } private synchronized Long convertStringToInteger(String string) throws KettleValueException { if (Const.isEmpty(string)) return null; string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion try { return new Long( getDecimalFormat().parse(string).longValue() ); } catch(Exception e) { throw new KettleValueException(toString()+" : couldn't convert String to Integer", e); } } private synchronized String convertBigNumberToString(BigDecimal number) throws KettleValueException { if (number==null) return null; String string = number.toString(); return string; } private synchronized BigDecimal convertStringToBigNumber(String string) throws KettleValueException { if (Const.isEmpty(string)) return null; string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion /* if (!".".equalsIgnoreCase(decimalSymbol)) { string = Const.replace(string, decimalSymbol.substring(0, 1), "."); } */ try { return new BigDecimal( string ); } catch(NumberFormatException e) { throw new KettleValueException(toString()+" : couldn't convert string value '" + string + "' to a number."); } } // BOOLEAN + STRING private String convertBooleanToString(Boolean bool) { if (bool==null) return null; if (length>=3) { return bool.booleanValue()?"true":"false"; } else { return bool.booleanValue()?"Y":"N"; 
} } public static Boolean convertStringToBoolean(String string) { if (Const.isEmpty(string)) return null; return Boolean.valueOf( "Y".equalsIgnoreCase(string) || "TRUE".equalsIgnoreCase(string) || "YES".equalsIgnoreCase(string) || "1".equals(string) ); } // BOOLEAN + NUMBER private Double convertBooleanToNumber(Boolean bool) { if (bool==null) return null; return new Double( bool.booleanValue() ? 1.0 : 0.0 ); } private Boolean convertNumberToBoolean(Double number) { if (number==null) return null; return Boolean.valueOf( number.intValue() != 0 ); } // BOOLEAN + INTEGER private Long convertBooleanToInteger(Boolean bool) { if (bool==null) return null; return Long.valueOf( bool.booleanValue() ? 1L : 0L ); } private Boolean convertIntegerToBoolean(Long number) { if (number==null) return null; return Boolean.valueOf( number.longValue() != 0 ); } // BOOLEAN + BIGNUMBER private BigDecimal convertBooleanToBigNumber(Boolean bool) { if (bool==null) return null; return new BigDecimal( bool.booleanValue() ? 1.0 : 0.0 ); } private Boolean convertBigNumberToBoolean(BigDecimal number) { if (number==null) return null; return Boolean.valueOf( number.intValue() != 0 ); } /** * Converts a byte[] stored in a binary string storage type into a String; * * @param binary the binary string * @return the String in the correct encoding. * @throws KettleValueException */ private String convertBinaryStringToString(byte[] binary) throws KettleValueException { // OK, so we have an internal representation of the original object, read from file. // Before we release it back, we have to see if we don't have to do a String-<type>-String // conversion with different masks. // This obviously only applies to numeric data and dates. // We verify if this is true or false in advance for performance reasons if (binary==null || binary.length==0) return null; String encoding; if (identicalFormat) encoding = getStringEncoding(); else encoding = storageMetadata.getStringEncoding(); if (Const.isEmpty(encoding)) { return new String(binary); } else { try { return new String(binary, encoding); } catch(UnsupportedEncodingException e) { throw new KettleValueException(toString()+" : couldn't convert binary value to String with specified string encoding ["+stringEncoding+"]", e); } } } /** * Converts the specified data object to the normal storage type. * @param object the data object to convert * @return the data in a normal storage type * @throws KettleValueException In case there is a data conversion error. */ public Object convertToNormalStorageType(Object object) throws KettleValueException { if (object==null) return null; switch(storageType) { case STORAGE_TYPE_NORMAL: return object; case STORAGE_TYPE_BINARY_STRING : return convertBinaryStringToNativeType((byte[])object); case STORAGE_TYPE_INDEXED : return index[(Integer)object]; default: throw new KettleValueException(toStringMeta()+" : Unknown storage type ["+storageType+"] while converting to normal storage type"); } } /** * Converts the specified data object to the binary string storage type. * @param object the data object to convert * @return the data in a binary string storage type * @throws KettleValueException In case there is a data conversion error. 
 */
public Object convertToBinaryStringStorageType(Object object) throws KettleValueException
{
    if (object==null) return null;

    switch(storageType)
    {
    case STORAGE_TYPE_NORMAL:        return convertNormalStorageTypeToBinaryString(object);
    case STORAGE_TYPE_BINARY_STRING: return object;
    case STORAGE_TYPE_INDEXED:       return convertNormalStorageTypeToBinaryString( index[(Integer)object] );
    default: throw new KettleValueException(toStringMeta()+" : Unknown storage type ["+storageType+"] while converting to binary string storage type");
    }
}

/**
 * Convert the binary data to the actual data type.<br>
 * - byte[] --> Long (Integer)
 * - byte[] --> Double (Number)
 * - byte[] --> BigDecimal (BigNumber)
 * - byte[] --> Date (Date)
 * - byte[] --> Boolean (Boolean)
 * - byte[] --> byte[] (Binary)
 *
 * @param binary the binary string to convert
 * @return the data converted to the native data type of this value metadata
 * @throws KettleValueException in case of a data conversion error
 */
public Object convertBinaryStringToNativeType(byte[] binary) throws KettleValueException
{
    if (binary==null) return null;

    numberOfBinaryStringConversions++;

    // OK, so we have an internal representation of the original object, read from file.
    // First we decode it in the correct encoding
    String string = convertBinaryStringToString(binary);

    // In this method we always must convert the data.
    // We use the storageMetadata object to convert the binary string object.
    // --> Convert from the String format to the current data type...
    return convertData(storageMetadata, string);
}

public Object convertNormalStorageTypeToBinaryString(Object object) throws KettleValueException
{
    if (object==null) return null;

    String string = getString(object);

    return convertStringToBinaryString(string);
}

private byte[] convertStringToBinaryString(String string) throws KettleValueException
{
    if (string==null) return null;

    if (Const.isEmpty(stringEncoding))
    {
        return string.getBytes();
    }
    else
    {
        try
        {
            return string.getBytes(stringEncoding);
        }
        catch(UnsupportedEncodingException e)
        {
            throw new KettleValueException(toString()+" : couldn't convert String to Binary with specified string encoding ["+stringEncoding+"]", e);
        }
    }
}

/**
 * Clones the data.  Normally, we don't have to do anything here, but just for arguments and safety,
 * we do a little extra work in case of binary blobs and Date objects.
 * We should write a programmers manual later on to specify in all clarity that
 *    "we always overwrite/replace values in the Object[] data rows, we never modify them".
 *
 * @return a cloned data object if needed
 */
public Object cloneValueData(Object object) throws KettleValueException
{
    if (object==null) return null;

    if (storageType==STORAGE_TYPE_NORMAL)
    {
        switch(getType())
        {
        case ValueMeta.TYPE_STRING:
        case ValueMeta.TYPE_NUMBER:
        case ValueMeta.TYPE_INTEGER:
        case ValueMeta.TYPE_BOOLEAN:
        case ValueMeta.TYPE_BIGNUMBER:
            // primitive data types: we can only overwrite these, not change them
            return object;

        case ValueMeta.TYPE_DATE:
            return new Date( ((Date)object).getTime() ); // just to make sure: very inexpensive too.
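        // byte[] is mutable, unlike the immutable types handled above, so the
        // TYPE_BINARY case below hands out a defensive copy of the array rather
        // than the original, keeping the source row data intact.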
case ValueMeta.TYPE_BINARY: byte[] origin = (byte[]) object; byte[] target = new byte[origin.length]; System.arraycopy(origin, 0, target, 0, origin.length); return target; default: throw new KettleValueException(toString()+": unable to make copy of value type: "+getType()); } } else { return object; } } public String getCompatibleString(Object object) throws KettleValueException { try { String string; switch(type) { case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertDateToCompatibleString((Date)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertDateToCompatibleString((Date)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertDateToCompatibleString((Date)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertNumberToCompatibleString((Double)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertNumberToCompatibleString((Double)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertNumberToCompatibleString((Double)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertIntegerToCompatibleString((Long)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertIntegerToCompatibleString((Long)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertIntegerToCompatibleString((Long)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; default: return getString(object); } return string; } catch(ClassCastException e) { throw new KettleValueException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]"); } } public String getString(Object object) throws KettleValueException { try { String string; switch(type) { case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: string = (String)object; break; case STORAGE_TYPE_BINARY_STRING: string = (String)convertBinaryStringToNativeType((byte[])object); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : (String) index[((Integer)object).intValue()]; break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } if ( string != null ) string = trim(string); break; case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertDateToString((Date)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertDateToString((Date)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? 
null : convertDateToString((Date)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertNumberToString((Double)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertNumberToString((Double)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertNumberToString((Double)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertIntegerToString((Long)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertIntegerToString((Long)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertIntegerToString((Long)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertBigNumberToString((BigDecimal)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertBigNumberToString((BigDecimal)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertBigNumberToString((BigDecimal)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_BOOLEAN: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertBooleanToString((Boolean)object); break; case STORAGE_TYPE_BINARY_STRING: string = convertBooleanToString((Boolean)convertBinaryStringToNativeType((byte[])object)); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertBooleanToString((Boolean)index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_BINARY: switch(storageType) { case STORAGE_TYPE_NORMAL: string = convertBinaryStringToString((byte[])object); break; case STORAGE_TYPE_BINARY_STRING: string = convertBinaryStringToString((byte[])object); break; case STORAGE_TYPE_INDEXED: string = object==null ? null : convertBinaryStringToString((byte[])index[((Integer)object).intValue()]); break; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; case TYPE_SERIALIZABLE: switch(storageType) { case STORAGE_TYPE_NORMAL: string = object.toString(); break; // just go for the default toString() case STORAGE_TYPE_BINARY_STRING: string = convertBinaryStringToString((byte[])object); break; case STORAGE_TYPE_INDEXED: string = object==null ? 
null : index[((Integer)object).intValue()].toString(); break; // just go for the default toString() default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } break; default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } if (isOutputPaddingEnabled() && getLength()>0) { string = ValueDataUtil.rightPad(string, getLength()); } return string; } catch(ClassCastException e) { throw new KettleValueException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]"); } } private String trim(String string) { switch(getTrimType()) { case TRIM_TYPE_NONE : break; case TRIM_TYPE_RIGHT : string = Const.rtrim(string); break; case TRIM_TYPE_LEFT : string = Const.ltrim(string); break; case TRIM_TYPE_BOTH : string = Const.trim(string); break; default: break; } return string; } public Double getNumber(Object object) throws KettleValueException { try { if (object==null) // NULL { return null; } switch(type) { case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return (Double)object; case STORAGE_TYPE_BINARY_STRING: return (Double)convertBinaryStringToNativeType((byte[])object); case STORAGE_TYPE_INDEXED: return (Double)index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToNumber((String)object); case STORAGE_TYPE_BINARY_STRING: return convertStringToNumber((String)convertBinaryStringToNativeType((byte[])object)); case STORAGE_TYPE_INDEXED: return convertStringToNumber((String) index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertDateToNumber((Date)object); case STORAGE_TYPE_BINARY_STRING: return convertDateToNumber((Date)convertBinaryStringToNativeType((byte[])object)); case STORAGE_TYPE_INDEXED: return new Double( ((Date)index[((Integer)object).intValue()]).getTime() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: return new Double( ((Long)object).doubleValue() ); case STORAGE_TYPE_BINARY_STRING: return new Double( ((Long)convertBinaryStringToNativeType((byte[])object)).doubleValue() ); case STORAGE_TYPE_INDEXED: return new Double( ((Long)index[((Integer)object).intValue()]).doubleValue() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return new Double( ((BigDecimal)object).doubleValue() ); case STORAGE_TYPE_BINARY_STRING: return new Double( ((BigDecimal)convertBinaryStringToNativeType((byte[])object)).doubleValue() ); case STORAGE_TYPE_INDEXED: return new Double( ((BigDecimal)index[((Integer)object).intValue()]).doubleValue() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BOOLEAN: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertBooleanToNumber( (Boolean)object ); case STORAGE_TYPE_BINARY_STRING: return convertBooleanToNumber( (Boolean)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: 
return convertBooleanToNumber( (Boolean)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BINARY: throw new KettleValueException(toString()+" : I don't know how to convert binary values to numbers."); case TYPE_SERIALIZABLE: throw new KettleValueException(toString()+" : I don't know how to convert serializable values to numbers."); default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } } catch(Exception e) { throw new KettleValueException("Unexpected conversion error while converting value ["+toString()+"] to a Number", e); } } public Long getInteger(Object object) throws KettleValueException { try { if (object==null) // NULL { return null; } switch(type) { case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: return (Long)object; case STORAGE_TYPE_BINARY_STRING: return (Long)convertBinaryStringToNativeType((byte[])object); case STORAGE_TYPE_INDEXED: return (Long)index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToInteger((String)object); case STORAGE_TYPE_BINARY_STRING: return convertStringToInteger((String)convertBinaryStringToNativeType((byte[])object)); case STORAGE_TYPE_INDEXED: return convertStringToInteger((String) index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return new Long( Math.round(((Double)object).doubleValue()) ); case STORAGE_TYPE_BINARY_STRING: return new Long( Math.round(((Double)convertBinaryStringToNativeType((byte[])object)).doubleValue()) ); case STORAGE_TYPE_INDEXED: return new Long( Math.round(((Double)index[((Integer)object).intValue()]).doubleValue()) ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertDateToInteger( (Date)object); case STORAGE_TYPE_BINARY_STRING: return new Long( ((Date)convertBinaryStringToNativeType((byte[])object)).getTime() ); case STORAGE_TYPE_INDEXED: return convertDateToInteger( (Date)index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return new Long( ((BigDecimal)object).longValue() ); case STORAGE_TYPE_BINARY_STRING: return new Long( ((BigDecimal)convertBinaryStringToNativeType((byte[])object)).longValue() ); case STORAGE_TYPE_INDEXED: return new Long( ((BigDecimal)index[((Integer)object).intValue()]).longValue() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BOOLEAN: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertBooleanToInteger( (Boolean)object ); case STORAGE_TYPE_BINARY_STRING: return convertBooleanToInteger( (Boolean)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertBooleanToInteger( (Boolean)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BINARY: throw new KettleValueException(toString()+" : I don't know 
how to convert binary values to integers."); case TYPE_SERIALIZABLE: throw new KettleValueException(toString()+" : I don't know how to convert serializable values to integers."); default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } } catch(Exception e) { throw new KettleValueException("Unexpected conversion error while converting value ["+toString()+"] to an Integer", e); } } public BigDecimal getBigNumber(Object object) throws KettleValueException { if (object==null) // NULL { return null; } switch(type) { case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return (BigDecimal)object; case STORAGE_TYPE_BINARY_STRING: return (BigDecimal)convertBinaryStringToNativeType((byte[])object); case STORAGE_TYPE_INDEXED: return (BigDecimal)index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBigNumber( (String)object ); case STORAGE_TYPE_BINARY_STRING: return convertStringToBigNumber( (String)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertStringToBigNumber((String) index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: return new BigDecimal( ((Long)object).doubleValue() ); case STORAGE_TYPE_BINARY_STRING: return new BigDecimal( ((Long)convertBinaryStringToNativeType((byte[])object)).longValue() ); case STORAGE_TYPE_INDEXED: return new BigDecimal( ((Long)index[((Integer)object).intValue()]).doubleValue() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return new BigDecimal( ((Double)object).doubleValue() ); case STORAGE_TYPE_BINARY_STRING: return new BigDecimal( ((Double)convertBinaryStringToNativeType((byte[])object)).doubleValue() ); case STORAGE_TYPE_INDEXED: return new BigDecimal( ((Double)index[((Integer)object).intValue()]).doubleValue() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertDateToBigNumber( (Date)object ); case STORAGE_TYPE_BINARY_STRING: return convertDateToBigNumber( (Date)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertDateToBigNumber( (Date)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BOOLEAN: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertBooleanToBigNumber( (Boolean)object ); case STORAGE_TYPE_BINARY_STRING: return convertBooleanToBigNumber( (Boolean)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertBooleanToBigNumber( (Boolean)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BINARY: throw new KettleValueException(toString()+" : I don't know how to convert binary values to integers."); case TYPE_SERIALIZABLE: throw new KettleValueException(toString()+" : I don't know how to convert serializable values to integers."); default: throw new 
KettleValueException(toString()+" : Unknown type "+type+" specified."); } } public Boolean getBoolean(Object object) throws KettleValueException { if (object==null) // NULL { return null; } switch(type) { case TYPE_BOOLEAN: switch(storageType) { case STORAGE_TYPE_NORMAL: return (Boolean)object; case STORAGE_TYPE_BINARY_STRING: return (Boolean)convertBinaryStringToNativeType((byte[])object); case STORAGE_TYPE_INDEXED: return (Boolean)index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBoolean( trim((String)object) ); case STORAGE_TYPE_BINARY_STRING: return convertStringToBoolean( trim((String)convertBinaryStringToNativeType((byte[])object)) ); case STORAGE_TYPE_INDEXED: return convertStringToBoolean( trim((String) index[((Integer)object).intValue()] )); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertIntegerToBoolean( (Long)object ); case STORAGE_TYPE_BINARY_STRING: return convertIntegerToBoolean( (Long)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertIntegerToBoolean( (Long)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertNumberToBoolean( (Double)object ); case STORAGE_TYPE_BINARY_STRING: return convertNumberToBoolean( (Double)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertNumberToBoolean( (Double)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertBigNumberToBoolean( (BigDecimal)object ); case STORAGE_TYPE_BINARY_STRING: return convertBigNumberToBoolean( (BigDecimal)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertBigNumberToBoolean( (BigDecimal)index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_DATE: throw new KettleValueException(toString()+" : I don't know how to convert date values to booleans."); case TYPE_BINARY: throw new KettleValueException(toString()+" : I don't know how to convert binary values to booleans."); case TYPE_SERIALIZABLE: throw new KettleValueException(toString()+" : I don't know how to convert serializable values to booleans."); default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } } public Date getDate(Object object) throws KettleValueException { if (object==null) // NULL { return null; } switch(type) { case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: return (Date)object; case STORAGE_TYPE_BINARY_STRING: return (Date)convertBinaryStringToNativeType((byte[])object); case STORAGE_TYPE_INDEXED: return (Date)index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToDate( (String)object ); case STORAGE_TYPE_BINARY_STRING: return 
convertStringToDate( (String)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertStringToDate( (String) index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertNumberToDate((Double)object); case STORAGE_TYPE_BINARY_STRING: return convertNumberToDate((Double)convertBinaryStringToNativeType((byte[])object) ); case STORAGE_TYPE_INDEXED: return convertNumberToDate((Double)index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertIntegerToDate((Long)object); case STORAGE_TYPE_BINARY_STRING: return convertIntegerToDate((Long)convertBinaryStringToNativeType((byte[])object)); case STORAGE_TYPE_INDEXED: return convertIntegerToDate((Long)index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertBigNumberToDate((BigDecimal)object); case STORAGE_TYPE_BINARY_STRING: return convertBigNumberToDate((BigDecimal)convertBinaryStringToNativeType((byte[])object)); case STORAGE_TYPE_INDEXED: return convertBigNumberToDate((BigDecimal)index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BOOLEAN: throw new KettleValueException(toString()+" : I don't know how to convert a boolean to a date."); case TYPE_BINARY: throw new KettleValueException(toString()+" : I don't know how to convert a binary value to date."); case TYPE_SERIALIZABLE: throw new KettleValueException(toString()+" : I don't know how to convert a serializable value to date."); default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } } public byte[] getBinary(Object object) throws KettleValueException { if (object==null) // NULL { return null; } switch(type) { case TYPE_BINARY: switch(storageType) { case STORAGE_TYPE_NORMAL: return (byte[])object; case STORAGE_TYPE_BINARY_STRING: return (byte[])object; case STORAGE_TYPE_INDEXED: return (byte[])index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_DATE: throw new KettleValueException(toString()+" : I don't know how to convert a date to binary."); case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( (String)object ); case STORAGE_TYPE_BINARY_STRING: return (byte[])object; case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( (String) index[((Integer)object).intValue()] ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_NUMBER: throw new KettleValueException(toString()+" : I don't know how to convert a number to binary."); case TYPE_INTEGER: throw new KettleValueException(toString()+" : I don't know how to convert an integer to binary."); case TYPE_BIGNUMBER: throw new KettleValueException(toString()+" : I don't know how to convert a bignumber to binary."); case TYPE_BOOLEAN: throw new KettleValueException(toString()+" : I don't know how to convert a boolean to binary."); case TYPE_SERIALIZABLE: throw new 
KettleValueException(toString()+" : I don't know how to convert a serializable to binary."); default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } } public byte[] getBinaryString(Object object) throws KettleValueException { // If the input is a binary string, we should return the exact same binary object IF // and only IF the formatting options for the storage metadata and this object are the same. if (isStorageBinaryString() && identicalFormat) { return (byte[]) object; // shortcut it directly for better performance. } try { if (object==null) // NULL { return null; } switch(type) { case TYPE_STRING: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString((String)object); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString((String)convertBinaryStringToNativeType((byte[])object)); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString((String) index[((Integer)object).intValue()]); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_DATE: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertDateToString((Date)object)); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertDateToString((Date)convertBinaryStringToNativeType((byte[])object))); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertDateToString((Date)index[((Integer)object).intValue()])); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_NUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertNumberToString((Double)object)); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertNumberToString((Double)convertBinaryStringToNativeType((byte[])object))); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertNumberToString((Double)index[((Integer)object).intValue()])); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_INTEGER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertIntegerToString((Long)object)); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertIntegerToString((Long)convertBinaryStringToNativeType((byte[])object))); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertIntegerToString((Long)index[((Integer)object).intValue()])); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BIGNUMBER: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertBigNumberToString((BigDecimal)object)); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertBigNumberToString((BigDecimal)convertBinaryStringToNativeType((byte[])object))); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertBigNumberToString((BigDecimal)index[((Integer)object).intValue()])); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BOOLEAN: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertBooleanToString((Boolean)object)); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertBooleanToString((Boolean)convertBinaryStringToNativeType((byte[])object))); case STORAGE_TYPE_INDEXED: return 
convertStringToBinaryString(convertBooleanToString((Boolean)index[((Integer)object).intValue()])); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_BINARY: switch(storageType) { case STORAGE_TYPE_NORMAL: return (byte[])object; case STORAGE_TYPE_BINARY_STRING: return (byte[])object; case STORAGE_TYPE_INDEXED: return (byte[])index[((Integer)object).intValue()]; default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } case TYPE_SERIALIZABLE: switch(storageType) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(object.toString()); case STORAGE_TYPE_BINARY_STRING: return (byte[])object; case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( index[((Integer)object).intValue()].toString() ); default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified."); } default: throw new KettleValueException(toString()+" : Unknown type "+type+" specified."); } } catch(ClassCastException e) { throw new KettleValueException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]"); } } /** * Checks whether or not the value is a String. * @return true if the value is a String. */ public boolean isString() { return type==TYPE_STRING; } /** * Checks whether or not this value is a Date * @return true if the value is a Date */ public boolean isDate() { return type==TYPE_DATE; } /** * Checks whether or not the value is a Big Number * @return true is this value is a big number */ public boolean isBigNumber() { return type==TYPE_BIGNUMBER; } /** * Checks whether or not the value is a Number * @return true is this value is a number */ public boolean isNumber() { return type==TYPE_NUMBER; } /** * Checks whether or not this value is a boolean * @return true if this value has type boolean. */ public boolean isBoolean() { return type==TYPE_BOOLEAN; } /** * Checks whether or not this value is of type Serializable * @return true if this value has type Serializable */ public boolean isSerializableType() { return type == TYPE_SERIALIZABLE; } /** * Checks whether or not this value is of type Binary * @return true if this value has type Binary */ public boolean isBinary() { return type == TYPE_BINARY; } /** * Checks whether or not this value is an Integer * @return true if this value is an integer */ public boolean isInteger() { return type==TYPE_INTEGER; } /** * Checks whether or not this Value is Numeric * A Value is numeric if it is either of type Number or Integer * @return true if the value is either of type Number or Integer */ public boolean isNumeric() { return isInteger() || isNumber() || isBigNumber(); } /** * Checks whether or not the specified type is either Integer or Number * @param t the type to check * @return true if the type is Integer or Number */ public static final boolean isNumeric(int t) { return t==TYPE_INTEGER || t==TYPE_NUMBER || t==TYPE_BIGNUMBER; } public boolean isSortedAscending() { return !isSortedDescending(); } /** * Return the type of a value in a textual form: "String", "Number", "Integer", "Boolean", "Date", ... * @return A String describing the type of value. 
*/ public String getTypeDesc() { return typeCodes[type]; } /** * Return the storage type of a value in a textual form: "normal", "binary-string", "indexes" * @return A String describing the storage type of the value metadata */ public String getStorageTypeDesc() { return storageTypeCodes[storageType]; } public String toString() { return name+" "+toStringMeta(); } /** * a String text representation of this Value, optionally padded to the specified length * @return a String text representation of this Value, optionally padded to the specified length */ public String toStringMeta() { // We (Sven Boden) did explicit performance testing for this // part. The original version used Strings instead of StringBuffers, // performance between the 2 does not differ that much. A few milliseconds // on 100000 iterations in the advantage of StringBuffers. The // lessened creation of objects may be worth it in the long run. StringBuffer retval=new StringBuffer(getTypeDesc()); switch(getType()) { case TYPE_STRING : if (getLength()>0) retval.append('(').append(getLength()).append(')'); break; case TYPE_NUMBER : case TYPE_BIGNUMBER : if (getLength()>0) { retval.append('(').append(getLength()); if (getPrecision()>0) { retval.append(", ").append(getPrecision()); } retval.append(')'); } break; case TYPE_INTEGER: if (getLength()>0) { retval.append('(').append(getLength()).append(')'); } break; default: break; } if (!isStorageNormal()) { retval.append("<").append(getStorageTypeDesc()).append(">"); } return retval.toString(); } public void writeData(DataOutputStream outputStream, Object object) throws KettleFileException { try { // Is the value NULL? outputStream.writeBoolean(object==null); if (object!=null) // otherwise there is no point { switch(storageType) { case STORAGE_TYPE_NORMAL: // Handle Content -- only when not NULL switch(getType()) { case TYPE_STRING : writeString(outputStream, (String)object); break; case TYPE_NUMBER : writeNumber(outputStream, (Double)object); break; case TYPE_INTEGER : writeInteger(outputStream, (Long)object); break; case TYPE_DATE : writeDate(outputStream, (Date)object); break; case TYPE_BIGNUMBER : writeBigNumber(outputStream, (BigDecimal)object); break; case TYPE_BOOLEAN : writeBoolean(outputStream, (Boolean)object); break; case TYPE_BINARY : writeBinary(outputStream, (byte[])object); break; default: throw new KettleFileException(toString()+" : Unable to serialize data type "+getType()); } break; case STORAGE_TYPE_BINARY_STRING: // Handle binary string content -- only when not NULL // In this case, we opt not to convert anything at all for speed. // That way, we can save on CPU power. // Since the streams can be compressed, volume shouldn't be an issue at all. writeBinaryString(outputStream, (byte[])object); break; case STORAGE_TYPE_INDEXED: writeInteger(outputStream, (Integer)object); // just an index break; default: throw new KettleFileException(toString()+" : Unknown storage type "+getStorageType()); } } } catch(ClassCastException e) { throw new RuntimeException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]"); } catch(IOException e) { throw new KettleFileException(toString()+" : Unable to write value data to output stream", e); } } public Object readData(DataInputStream inputStream) throws KettleFileException, KettleEOFException, SocketTimeoutException { try { // Is the value NULL? 
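            // Wire layout sketch (mirrors writeData() above): a boolean null-flag comes first,
            // followed -- only when the value is not null -- by a payload that depends on the
            // type and storage type:
            //   normal storage  : Long/Date as an 8-byte long, Double as an 8-byte double,
            //                     Boolean as a single byte, String/BigDecimal as
            //                     [int length][bytes in Const.XML_ENCODING]
            //   binary-string   : [int length][raw bytes], no conversion performed
            //   indexed storage : a single 4-byte index into the index[] array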
if (inputStream.readBoolean()) return null; // done switch(storageType) { case STORAGE_TYPE_NORMAL: // Handle Content -- only when not NULL switch(getType()) { case TYPE_STRING : return readString(inputStream); case TYPE_NUMBER : return readNumber(inputStream); case TYPE_INTEGER : return readInteger(inputStream); case TYPE_DATE : return readDate(inputStream); case TYPE_BIGNUMBER : return readBigNumber(inputStream); case TYPE_BOOLEAN : return readBoolean(inputStream); case TYPE_BINARY : return readBinary(inputStream); default: throw new KettleFileException(toString()+" : Unable to de-serialize data of type "+getType()); } case STORAGE_TYPE_BINARY_STRING: return readBinaryString(inputStream); case STORAGE_TYPE_INDEXED: return readSmallInteger(inputStream); // just an index: 4-bytes should be enough. default: throw new KettleFileException(toString()+" : Unknown storage type "+getStorageType()); } } catch(EOFException e) { throw new KettleEOFException(e); } catch(SocketTimeoutException e) { throw e; } catch(IOException e) { throw new KettleFileException(toString()+" : Unable to read value data from input stream", e); } } private void writeString(DataOutputStream outputStream, String string) throws IOException { // Write the length and then the bytes if (string==null) { outputStream.writeInt(-1); } else { byte[] chars = string.getBytes(Const.XML_ENCODING); outputStream.writeInt(chars.length); outputStream.write(chars); } } private void writeBinaryString(DataOutputStream outputStream, byte[] binaryString) throws IOException { // Write the length and then the bytes if (binaryString==null) { outputStream.writeInt(-1); } else { outputStream.writeInt(binaryString.length); outputStream.write(binaryString); } } private String readString(DataInputStream inputStream) throws IOException { // Read the length and then the bytes int length = inputStream.readInt(); if (length<0) { return null; } byte[] chars = new byte[length]; inputStream.readFully(chars); String string = new String(chars, Const.XML_ENCODING); // System.out.println("Read string("+getName()+"), length "+length+": "+string); return string; } private byte[] readBinaryString(DataInputStream inputStream) throws IOException { // Read the length and then the bytes int length = inputStream.readInt(); if (length<0) { return null; } byte[] chars = new byte[length]; inputStream.readFully(chars); return chars; } private void writeBigNumber(DataOutputStream outputStream, BigDecimal number) throws IOException { String string = number.toString(); writeString(outputStream, string); } private BigDecimal readBigNumber(DataInputStream inputStream) throws IOException { String string = readString(inputStream); // System.out.println("Read big number("+getName()+") ["+string+"]"); return new BigDecimal(string); } private void writeDate(DataOutputStream outputStream, Date date) throws IOException { outputStream.writeLong(date.getTime()); } private Date readDate(DataInputStream inputStream) throws IOException { long time = inputStream.readLong(); // System.out.println("Read Date("+getName()+") ["+new Date(time)+"]"); return new Date(time); } private void writeBoolean(DataOutputStream outputStream, Boolean bool) throws IOException { outputStream.writeBoolean(bool.booleanValue()); } private Boolean readBoolean(DataInputStream inputStream) throws IOException { Boolean bool = Boolean.valueOf( inputStream.readBoolean() ); // System.out.println("Read boolean("+getName()+") ["+bool+"]"); return bool; } private void writeNumber(DataOutputStream outputStream, Double number) 
throws IOException { outputStream.writeDouble(number.doubleValue()); } private Double readNumber(DataInputStream inputStream) throws IOException { Double d = new Double( inputStream.readDouble() ); // System.out.println("Read number("+getName()+") ["+d+"]"); return d; } private void writeInteger(DataOutputStream outputStream, Long number) throws IOException { outputStream.writeLong(number.longValue()); } private Long readInteger(DataInputStream inputStream) throws IOException { Long l = new Long( inputStream.readLong() ); // System.out.println("Read integer("+getName()+") ["+l+"]"); return l; } private void writeInteger(DataOutputStream outputStream, Integer number) throws IOException { outputStream.writeInt(number.intValue()); } private Integer readSmallInteger(DataInputStream inputStream) throws IOException { Integer i = Integer.valueOf( inputStream.readInt() ); // System.out.println("Read index integer("+getName()+") ["+i+"]"); return i; } private void writeBinary(DataOutputStream outputStream, byte[] binary) throws IOException { outputStream.writeInt(binary.length); outputStream.write(binary); } private byte[] readBinary(DataInputStream inputStream) throws IOException { int size = inputStream.readInt(); byte[] buffer = new byte[size]; inputStream.readFully(buffer); // System.out.println("Read binary("+getName()+") with size="+size); return buffer; } public void writeMeta(DataOutputStream outputStream) throws KettleFileException { try { int type=getType(); // Handle type outputStream.writeInt(type); // Handle storage type outputStream.writeInt(storageType); switch(storageType) { case STORAGE_TYPE_INDEXED: { // Save the indexed strings... if (index==null) { outputStream.writeInt(-1); // null } else { outputStream.writeInt(index.length); for (int i=0;i<index.length;i++) { try { switch(type) { case TYPE_STRING: writeString(outputStream, (String)index[i]); break; case TYPE_NUMBER: writeNumber(outputStream, (Double)index[i]); break; case TYPE_INTEGER: writeInteger(outputStream, (Long)index[i]); break; case TYPE_DATE: writeDate(outputStream, (Date)index[i]); break; case TYPE_BIGNUMBER: writeBigNumber(outputStream, (BigDecimal)index[i]); break; case TYPE_BOOLEAN: writeBoolean(outputStream, (Boolean)index[i]); break; case TYPE_BINARY: writeBinary(outputStream, (byte[])index[i]); break; default: throw new KettleFileException(toString()+" : Unable to serialize indexe storage type for data type "+getType()); } } catch (ClassCastException e) { throw new RuntimeException(toString()+" : There was a data type error: the data type of "+index[i].getClass().getName()+" object ["+index[i]+"] does not correspond to value meta ["+toStringMeta()+"]"); } } } } break; case STORAGE_TYPE_BINARY_STRING: { // Save the storage meta data... 
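            // For binary-string storage the ValueMeta describing the original (string) layout is
            // written recursively, preceded by a presence flag, so that the reading constructor
            // ValueMeta(DataInputStream) can rebuild it.  Illustrative round trip (hypothetical
            // stream variables; assumes the usual ValueMeta(String name, int type) constructor):
            //
            //   ValueMeta meta = new ValueMeta("id", ValueMeta.TYPE_INTEGER);
            //   meta.writeMeta(out);
            //   meta.writeData(out, Long.valueOf(42L));
            //   ...
            //   ValueMeta readBack = new ValueMeta(in);     // reads what writeMeta() wrote
            //   Long value = (Long) readBack.readData(in);  // reads what writeData() wrote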
outputStream.writeBoolean(storageMetadata!=null); if (storageMetadata!=null) { storageMetadata.writeMeta(outputStream); } } break; default: break; } // Handle name-length writeString(outputStream, name); // length & precision outputStream.writeInt(getLength()); outputStream.writeInt(getPrecision()); // Origin writeString(outputStream, origin); // Comments writeString(outputStream, comments); // formatting Mask, decimal, grouping, currency writeString(outputStream, conversionMask); writeString(outputStream, decimalSymbol); writeString(outputStream, groupingSymbol); writeString(outputStream, currencySymbol); outputStream.writeInt(trimType); // Case sensitivity of compare outputStream.writeBoolean(caseInsensitive); // Sorting information outputStream.writeBoolean(sortedDescending); // Padding information outputStream.writeBoolean(outputPaddingEnabled); // date format lenient? outputStream.writeBoolean(dateFormatLenient); // date format locale? writeString(outputStream, dateFormatLocale!=null ? dateFormatLocale.toString() : null); } catch(IOException e) { throw new KettleFileException(toString()+" : Unable to write value metadata to output stream", e); } } public ValueMeta(DataInputStream inputStream) throws KettleFileException, KettleEOFException { this(); try { // Handle type type=inputStream.readInt(); // Handle storage type storageType = inputStream.readInt(); // Read the data in the index switch(storageType) { case STORAGE_TYPE_INDEXED: { int indexSize = inputStream.readInt(); if (indexSize<0) { index=null; } else { index=new Object[indexSize]; for (int i=0;i<indexSize;i++) { switch(type) { case TYPE_STRING: index[i] = readString(inputStream); break; case TYPE_NUMBER: index[i] = readNumber(inputStream); break; case TYPE_INTEGER: index[i] = readInteger(inputStream); break; case TYPE_DATE: index[i] = readDate(inputStream); break; case TYPE_BIGNUMBER: index[i] = readBigNumber(inputStream); break; case TYPE_BOOLEAN: index[i] = readBoolean(inputStream); break; case TYPE_BINARY: index[i] = readBinary(inputStream); break; default: throw new KettleFileException(toString()+" : Unable to de-serialize indexed storage type for data type "+getType()); } } } } break; case STORAGE_TYPE_BINARY_STRING: { // In case we do have storage metadata defined, we read that back in as well.. if (inputStream.readBoolean()) { storageMetadata = new ValueMeta(inputStream); } } break; default: break; } // name name = readString(inputStream); // length & precision length = inputStream.readInt(); precision = inputStream.readInt(); // Origin origin = readString(inputStream); // Comments comments=readString(inputStream); // formatting Mask, decimal, grouping, currency conversionMask=readString(inputStream); decimalSymbol=readString(inputStream); groupingSymbol=readString(inputStream); currencySymbol=readString(inputStream); trimType=inputStream.readInt(); // Case sensitivity caseInsensitive = inputStream.readBoolean(); // Sorting type sortedDescending = inputStream.readBoolean(); // Output padding? outputPaddingEnabled = inputStream.readBoolean(); // is date parsing lenient? 
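            // (The read order here must stay in sync with writeMeta() above: the lenient flag
            // is followed by the locale, serialized as a string where an empty value means null.)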
dateFormatLenient = inputStream.readBoolean(); String strDateFormatLocale = readString(inputStream); if (Const.isEmpty(strDateFormatLocale)) { dateFormatLocale = null; } else { dateFormatLocale = new Locale(strDateFormatLocale); } } catch(EOFException e) { throw new KettleEOFException(e); } catch(IOException e) { throw new KettleFileException(toString()+" : Unable to read value metadata from input stream", e); } } public String getMetaXML() throws IOException { StringBuffer xml = new StringBuffer(); xml.append(XMLHandler.openTag(XML_META_TAG)); xml.append( XMLHandler.addTagValue("type", getTypeDesc()) ) ; xml.append( XMLHandler.addTagValue("storagetype", getStorageType()) ); switch(storageType) { case STORAGE_TYPE_INDEXED: { xml.append( XMLHandler.openTag("index")); // Save the indexed strings... if (index!=null) { for (int i=0;i<index.length;i++) { try { switch(type) { case TYPE_STRING: xml.append( XMLHandler.addTagValue( "value", (String)index[i]) ); break; case TYPE_NUMBER: xml.append( XMLHandler.addTagValue( "value", (Double)index[i]) ); break; case TYPE_INTEGER: xml.append( XMLHandler.addTagValue( "value", (Long)index[i]) ); break; case TYPE_DATE: xml.append( XMLHandler.addTagValue( "value", (Date)index[i]) ); break; case TYPE_BIGNUMBER: xml.append( XMLHandler.addTagValue( "value", (BigDecimal)index[i]) ); break; case TYPE_BOOLEAN: xml.append( XMLHandler.addTagValue( "value", (Boolean)index[i]) ); break; case TYPE_BINARY: xml.append( XMLHandler.addTagValue( "value", (byte[])index[i]) ); break; default: throw new IOException(toString()+" : Unable to serialize indexe storage type to XML for data type "+getType()); } } catch (ClassCastException e) { throw new RuntimeException(toString()+" : There was a data type error: the data type of "+index[i].getClass().getName()+" object ["+index[i]+"] does not correspond to value meta ["+toStringMeta()+"]"); } } } xml.append( XMLHandler.closeTag("index")); } break; case STORAGE_TYPE_BINARY_STRING: { // Save the storage meta data... 
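            // When the value is binary-string stored, the metadata describing the original string
            // layout (conversion mask, symbols, and so on) is embedded as a nested <storage-meta>
            // element, written via a recursive getMetaXML() call just below.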
if (storageMetadata!=null) { xml.append(XMLHandler.openTag("storage-meta")); xml.append(storageMetadata.getMetaXML()); xml.append(XMLHandler.closeTag("storage-meta")); } } break; default: break; } xml.append( XMLHandler.addTagValue("name", name) ); xml.append( XMLHandler.addTagValue("length", length) ); xml.append( XMLHandler.addTagValue("precision", precision) ); xml.append( XMLHandler.addTagValue("origin", origin) ); xml.append( XMLHandler.addTagValue("comments", comments) ); xml.append( XMLHandler.addTagValue("conversion_Mask", conversionMask) ); xml.append( XMLHandler.addTagValue("decimal_symbol", decimalSymbol) ); xml.append( XMLHandler.addTagValue("grouping_symbol", groupingSymbol) ); xml.append( XMLHandler.addTagValue("currency_symbol", currencySymbol) ); xml.append( XMLHandler.addTagValue("trim_type", getTrimTypeCode(trimType)) ); xml.append( XMLHandler.addTagValue("case_insensitive", caseInsensitive) ); xml.append( XMLHandler.addTagValue("sort_descending", sortedDescending) ); xml.append( XMLHandler.addTagValue("output_padding", outputPaddingEnabled) ); xml.append( XMLHandler.addTagValue("date_format_lenient", dateFormatLenient) ); xml.append( XMLHandler.addTagValue("date_format_locale", dateFormatLocale.toString()) ); xml.append(XMLHandler.closeTag(XML_META_TAG)); return xml.toString(); } public ValueMeta(Node node) throws IOException { this(); type = getType( XMLHandler.getTagValue(node, "type") ) ; storageType = getStorageType( XMLHandler.getTagValue(node, "storagetype") ); switch(storageType) { case STORAGE_TYPE_INDEXED: { Node indexNode = XMLHandler.getSubNode(node, "index"); int nrIndexes = XMLHandler.countNodes(indexNode, "value"); index = new Object[nrIndexes]; for (int i=0;i<index.length;i++) { Node valueNode = XMLHandler.getSubNodeByNr(indexNode, "value", i); String valueString = XMLHandler.getNodeValue(valueNode); if (Const.isEmpty(valueString)) { index[i] = null; } else { switch(type) { case TYPE_STRING: index[i] = valueString; break; case TYPE_NUMBER: index[i] = Double.parseDouble( valueString ); break; case TYPE_INTEGER: index[i] = Long.parseLong( valueString ); break; case TYPE_DATE: index[i] = XMLHandler.stringToDate( valueString ); ; break; case TYPE_BIGNUMBER: index[i] = new BigDecimal( valueString ); ; break; case TYPE_BOOLEAN: index[i] = Boolean.valueOf("Y".equalsIgnoreCase( valueString)); break; case TYPE_BINARY: index[i] = XMLHandler.stringToBinary( valueString ); break; default: throw new IOException(toString()+" : Unable to de-serialize indexe storage type from XML for data type "+getType()); } } } } break; case STORAGE_TYPE_BINARY_STRING: { // Save the storage meta data... 
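            // (Reads back the nested <storage-meta> element that getMetaXML() writes for
            // binary-string storage, rebuilding storageMetadata recursively.)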
            Node storageMetaNode = XMLHandler.getSubNode(node, "storage-meta");
            if (storageMetaNode!=null)
            {
                storageMetadata = new ValueMeta(storageMetaNode);
            }
        }
        break;
    default:
        break;
    }

    name = XMLHandler.getTagValue(node, "name");
    length = Integer.parseInt( XMLHandler.getTagValue(node, "length") );
    precision = Integer.parseInt( XMLHandler.getTagValue(node, "precision") );
    origin = XMLHandler.getTagValue(node, "origin");
    comments = XMLHandler.getTagValue(node, "comments");
    conversionMask = XMLHandler.getTagValue(node, "conversion_Mask");
    decimalSymbol = XMLHandler.getTagValue(node, "decimal_symbol");
    groupingSymbol = XMLHandler.getTagValue(node, "grouping_symbol");
    currencySymbol = XMLHandler.getTagValue(node, "currency_symbol");
    trimType = getTrimTypeByCode( XMLHandler.getTagValue(node, "trim_type") );

    caseInsensitive = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "case_insensitive") );
    sortedDescending = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "sort_descending") );
    outputPaddingEnabled = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "output_padding") );
    dateFormatLenient = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "date_format_lenient") );

    String dateFormatLocaleString = XMLHandler.getTagValue(node, "date_format_locale");
    if (!Const.isEmpty( dateFormatLocaleString ))
    {
        dateFormatLocale = new Locale(dateFormatLocaleString);
    }
}

public String getDataXML(Object object) throws IOException
{
    StringBuffer xml = new StringBuffer();

    xml.append(XMLHandler.openTag(XML_DATA_TAG));

    if (object!=null) // otherwise there is no point
    {
        try
        {
            switch(storageType)
            {
            case STORAGE_TYPE_NORMAL:
                // Handle Content -- only when not NULL
                switch(getType())
                {
                case TYPE_STRING    : xml.append( XMLHandler.addTagValue("string-value", (String)object) ); break;
                case TYPE_NUMBER    : xml.append( XMLHandler.addTagValue("number-value", (Double)object) ); break;
                case TYPE_INTEGER   : xml.append( XMLHandler.addTagValue("integer-value", (Long)object) ); break;
                case TYPE_DATE      : xml.append( XMLHandler.addTagValue("date-value", (Date)object) ); break;
                case TYPE_BIGNUMBER : xml.append( XMLHandler.addTagValue("bignumber-value", (BigDecimal)object) ); break;
                case TYPE_BOOLEAN   : xml.append( XMLHandler.addTagValue("boolean-value", (Boolean)object) ); break;
                case TYPE_BINARY    : xml.append( XMLHandler.addTagValue("binary-value", (byte[])object) ); break;
                default: throw new IOException(toString()+" : Unable to serialize data type to XML "+getType());
                }
                break;

            case STORAGE_TYPE_BINARY_STRING:
                // Handle binary string content -- only when not NULL
                // In this case, we opt not to convert anything at all for speed.
                // That way, we can save on CPU power.
                // Since the streams can be compressed, volume shouldn't be an issue at all.
                xml.append( XMLHandler.addTagValue("binary-string", (byte[])object) );
                break;

            case STORAGE_TYPE_INDEXED:
                xml.append( XMLHandler.addTagValue("index-value", (Integer)object) ); // just an index
                break;

            default: throw new IOException(toString()+" : Unknown storage type "+getStorageType());
            }
        }
        catch (ClassCastException e)
        {
            throw new RuntimeException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]");
        }
    }

    xml.append(XMLHandler.closeTag(XML_DATA_TAG));

    return xml.toString();
}

/**
 * Convert a data XML node to an Object that corresponds to the metadata.
 * This is basically String to Object conversion that is being done.
* @param node the node to retrieve the data value from * @return the converted data value * @throws IOException thrown in case there is a problem with the XML to object conversion */ public Object getValue(Node node) throws IOException { switch(storageType) { case STORAGE_TYPE_NORMAL: String valueString = XMLHandler.getTagValue(node, "value"); if (Const.isEmpty(valueString)) return null; // Handle Content -- only when not NULL switch(getType()) { case TYPE_STRING: return valueString; case TYPE_NUMBER: return Double.parseDouble( valueString ); case TYPE_INTEGER: return Long.parseLong( valueString ); case TYPE_DATE: return XMLHandler.stringToDate( valueString ); case TYPE_BIGNUMBER: return new BigDecimal( valueString ); case TYPE_BOOLEAN: return Boolean.valueOf("Y".equalsIgnoreCase( valueString)); case TYPE_BINARY: return XMLHandler.stringToBinary( valueString ); default: throw new IOException(toString()+" : Unable to de-serialize '"+valueString+"' from XML for data type "+getType()); } case STORAGE_TYPE_BINARY_STRING: // Handle binary string content -- only when not NULL // In this case, we opt not to convert anything at all for speed. // That way, we can save on CPU power. // Since the streams can be compressed, volume shouldn't be an issue at all. String binaryString = XMLHandler.getTagValue(node, "binary-string"); if (Const.isEmpty(binaryString)) return null; return XMLHandler.stringToBinary(binaryString); case STORAGE_TYPE_INDEXED: String indexString = XMLHandler.getTagValue(node, "index-value"); if (Const.isEmpty(indexString)) return null; return Integer.parseInt(indexString); default: throw new IOException(toString()+" : Unknown storage type "+getStorageType()); } } /** * get an array of String describing the possible types a Value can have. * @return an array of String describing the possible types a Value can have. */ public static final String[] getTypes() { String retval[] = new String[typeCodes.length-1]; System.arraycopy(typeCodes, 1, retval, 0, typeCodes.length-1); return retval; } /** * Get an array of String describing the possible types a Value can have. * @return an array of String describing the possible types a Value can have. */ public static final String[] getAllTypes() { String retval[] = new String[typeCodes.length]; System.arraycopy(typeCodes, 0, retval, 0, typeCodes.length); return retval; } /** * TODO: change Desc to Code all over the place. Make sure we can localise this stuff later on. * * @param type the type * @return the description (code) of the type */ public static final String getTypeDesc(int type) { return typeCodes[type]; } /** * Convert the String description of a type to an integer type. * @param desc The description of the type to convert * @return The integer type of the given String. (ValueMetaInterface.TYPE_...) */ public static final int getType(String desc) { for (int i=1;i<typeCodes.length;i++) { if (typeCodes[i].equalsIgnoreCase(desc)) { return i; } } return TYPE_NONE; } /** * Convert the String description of a storage type to an integer type. * @param desc The description of the storage type to convert * @return The integer storage type of the given String. (ValueMetaInterface.STORAGE_TYPE_...) or -1 if the storage type code not be found. 
*/ public static final int getStorageType(String desc) { for (int i=0;i<storageTypeCodes.length;i++) { if (storageTypeCodes[i].equalsIgnoreCase(desc)) { return i; } } return -1; } public static final String getStorageTypeCode(int storageType) { if (storageType>=STORAGE_TYPE_NORMAL && storageType<=STORAGE_TYPE_INDEXED) { return storageTypeCodes[storageType]; } return null; } /** * Determine if an object is null. * This is the case if data==null or if it's an empty string. * @param data the object to test * @return true if the object is considered null. */ public boolean isNull(Object data) { try{ if (data==null) return true; if (isString()) { if (isStorageNormal() && ((String)data).length()==0) return true; if (isStorageBinaryString()) { if ( ((byte[])data).length==0 ) return true; } } return false; } catch(ClassCastException e) { throw new RuntimeException("Unable to verify if ["+toString()+"] is null or not because of an error:"+e.toString(), e); } } /* * Compare 2 binary strings, one byte at a time.<br> * This algorithm is very fast but most likely wrong as well.<br> * * @param one The first binary string to compare with * @param two the second binary string to compare to * @return -1 if <i>one</i> is smaller than <i>two</i>, 0 is both byte arrays are identical and 1 if <i>one</i> is larger than <i>two</i> private int compareBinaryStrings(byte[] one, byte[] two) { for (int i=0;i<one.length;i++) { if (i>=two.length) return 1; // larger if (one[i]>two[i]) return 1; // larger if (one[i]<two[i]) return -1; // smaller } if (one.length>two.length) return 1; // larger if (one.length>two.length) return -11; // smaller return 0; } */ /** * Compare 2 values of the same data type * @param data1 the first value * @param data2 the second value * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger. * @throws KettleValueException In case we get conversion errors */ public int compare(Object data1, Object data2) throws KettleValueException { boolean n1 = isNull(data1); boolean n2 = isNull(data2); // null is always smaller! 
if (n1 && !n2) return -1; if (!n1 && n2) return 1; if (n1 && n2) return 0; int cmp=0; switch (getType()) { case TYPE_STRING: { // if (isStorageBinaryString() && identicalFormat && storageMetadata.isSingleByteEncoding()) return compareBinaryStrings((byte[])data1, (byte[])data2); TODO String one = Const.rtrim(getString(data1)); String two = Const.rtrim(getString(data2)); if (caseInsensitive) { cmp = one.compareToIgnoreCase(two); } else { cmp = one.compareTo(two); } } break; case TYPE_INTEGER: { // if (isStorageBinaryString() && identicalFormat) return compareBinaryStrings((byte[])data1, (byte[])data2); TODO long compare = getInteger(data1).longValue() - getInteger(data2).longValue(); if (compare<0) cmp=-1; else if (compare>0) cmp=1; else cmp=0; } break; case TYPE_NUMBER: { cmp=Double.compare(getNumber(data1).doubleValue(), getNumber(data2).doubleValue()); } break; case TYPE_DATE: { long compare = getDate(data1).getTime() - getDate(data2).getTime(); if (compare<0) cmp=-1; else if (compare>0) cmp=1; else cmp=0; } break; case TYPE_BIGNUMBER: { cmp=getBigNumber(data1).compareTo(getBigNumber(data2)); } break; case TYPE_BOOLEAN: { if (getBoolean(data1).booleanValue() == getBoolean(data2).booleanValue()) cmp=0; // true == true, false == false else if (getBoolean(data1).booleanValue() && !getBoolean(data2).booleanValue()) cmp=1; // true > false else cmp=-1; // false < true } break; case TYPE_BINARY: { byte[] b1 = (byte[]) data1; byte[] b2 = (byte[]) data2; int length= b1.length < b2.length ? b1.length : b2.length; for (int i=0;i<length;i++) { cmp = b1[i] - b2[i]; if (cmp!=0) { cmp = Math.abs(cmp); break; } } } break; default: throw new KettleValueException(toString()+" : Comparing values can not be done with data type : "+getType()); } if (isSortedDescending()) { return -cmp; } else { return cmp; } } /** * Compare 2 values of the same data type * @param data1 the first value * @param meta2 the second value's metadata * @param data2 the second value * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger. * @throws KettleValueException In case we get conversion errors */ public int compare(Object data1, ValueMetaInterface meta2, Object data2) throws KettleValueException { if (meta2==null) { throw new KettleValueException(toStringMeta()+" : Second meta data (meta2) is null, please check one of the previous steps."); } try { // Before we can compare data1 to data2 we need to make sure they have the same data type etc. if (getType()==meta2.getType()) { if (getStorageType()==meta2.getStorageType()) return compare(data1, data2); // Convert the storage type to compare the data. switch(getStorageType()) { case STORAGE_TYPE_NORMAL : return compare(data1, meta2.convertToNormalStorageType(data2)); case STORAGE_TYPE_BINARY_STRING : return compare(data1, meta2.convertToBinaryStringStorageType(data2)); case STORAGE_TYPE_INDEXED : switch(meta2.getStorageType()) { case STORAGE_TYPE_INDEXED: return compare(data1, data2); // not accessible, just to make sure. case STORAGE_TYPE_NORMAL: return -meta2.compare(data2, convertToNormalStorageType(data1)); case STORAGE_TYPE_BINARY_STRING: return -meta2.compare(data2, convertToBinaryStringStorageType(data1)); default: throw new KettleValueException(meta2.toStringMeta()+" : Unknown storage type : "+meta2.getStorageType()); } default: throw new KettleValueException(toStringMeta()+" : Unknown storage type : "+getStorageType()); } } // If the data types are not the same, the first one is the driver... 
// The second data type is converted to the first one. return compare(data1, convertData(meta2, data2)); } catch(Exception e) { throw new KettleValueException(toStringMeta()+" : Unable to compare with value ["+meta2.toStringMeta()+"]", e); } } /** * Convert the specified data to the data type specified in this object. * @param meta2 the metadata of the object to be converted * @param data2 the data of the object to be converted * @return the object in the data type of this value metadata object * @throws KettleValueException in case there is a data conversion error */ public Object convertData(ValueMetaInterface meta2, Object data2) throws KettleValueException { switch(getType()) { case TYPE_STRING : return meta2.getString(data2); case TYPE_NUMBER : return meta2.getNumber(data2); case TYPE_INTEGER : return meta2.getInteger(data2); case TYPE_DATE : return meta2.getDate(data2); case TYPE_BIGNUMBER : return meta2.getBigNumber(data2); case TYPE_BOOLEAN : return meta2.getBoolean(data2); case TYPE_BINARY : return meta2.getBinary(data2); default: throw new KettleValueException(toString()+" : I can't convert the specified value to data type : "+getType()); } } /** * Convert the specified data to the data type specified in this object. * For String conversion, be compatible with version 2.5.2. * * @param meta2 the metadata of the object to be converted * @param data2 the data of the object to be converted * @return the object in the data type of this value metadata object * @throws KettleValueException in case there is a data conversion error */ public Object convertDataCompatible(ValueMetaInterface meta2, Object data2) throws KettleValueException { switch(getType()) { case TYPE_STRING : return meta2.getCompatibleString(data2); case TYPE_NUMBER : return meta2.getNumber(data2); case TYPE_INTEGER : return meta2.getInteger(data2); case TYPE_DATE : return meta2.getDate(data2); case TYPE_BIGNUMBER : return meta2.getBigNumber(data2); case TYPE_BOOLEAN : return meta2.getBoolean(data2); case TYPE_BINARY : return meta2.getBinary(data2); default: throw new KettleValueException(toString()+" : I can't convert the specified value to data type : "+getType()); } } /** * Convert an object to the data type specified in the conversion metadata * @param data The data * @return The data converted to the storage data type * @throws KettleValueException in case there is a conversion error. */ public Object convertDataUsingConversionMetaData(Object data2) throws KettleValueException { if (conversionMetadata==null) { throw new KettleValueException("API coding error: please specify the conversion metadata before attempting to convert value "+name); } // Suppose we have an Integer 123, length 5 // The string variation of this is " 00123" // To convert this back to an Integer we use the storage metadata // Specifically, in method convertStringToInteger() we consult the storageMetaData to get the correct conversion mask // That way we're always sure that a conversion works both ways. 
switch(conversionMetadata.getType()) { case TYPE_STRING : return getString(data2); case TYPE_INTEGER : return getInteger(data2); case TYPE_NUMBER : return getNumber(data2); case TYPE_DATE : return getDate(data2); case TYPE_BIGNUMBER : return getBigNumber(data2); case TYPE_BOOLEAN : return getBoolean(data2); case TYPE_BINARY : return getBinary(data2); default: throw new KettleValueException(toString()+" : I can't convert the specified value to data type : "+storageMetadata.getType()); } } /** * Convert the specified string to the data type specified in this object. * @param pol the string to be converted * @param convertMeta the metadata of the object (only string type) to be converted * @param nullIf set the object to null if pos equals nullif (IgnoreCase) * @param ifNull set the object to ifNull when pol is empty or null * @param trim_type the trim type to be used (ValueMetaInterface.TRIM_TYPE_XXX) * @return the object in the data type of this value metadata object * @throws KettleValueException in case there is a data conversion error */ public Object convertDataFromString(String pol, ValueMetaInterface convertMeta, String nullIf, String ifNull, int trim_type) throws KettleValueException { // null handling and conversion of value to null String null_value = nullIf; if (null_value == null) { switch (convertMeta.getType()) { case Value.VALUE_TYPE_BOOLEAN: null_value = Const.NULL_BOOLEAN; break; case Value.VALUE_TYPE_STRING: null_value = Const.NULL_STRING; break; case Value.VALUE_TYPE_BIGNUMBER: null_value = Const.NULL_BIGNUMBER; break; case Value.VALUE_TYPE_NUMBER: null_value = Const.NULL_NUMBER; break; case Value.VALUE_TYPE_INTEGER: null_value = Const.NULL_INTEGER; break; case Value.VALUE_TYPE_DATE: null_value = Const.NULL_DATE; break; case Value.VALUE_TYPE_BINARY: null_value = Const.NULL_BINARY; break; default: null_value = Const.NULL_NONE; break; } } // See if we need to convert a null value into a String // For example, we might want to convert null into "Empty". if (!Const.isEmpty(ifNull)) { // Note that you can't pull the pad method up here as a nullComp variable because you could get an NPE since you haven't checked isEmpty(pol) yet! if (Const.isEmpty(pol) || pol.equalsIgnoreCase(Const.rightPad(new StringBuffer(null_value), pol.length()))) { pol = ifNull; } } // See if the polled value is empty // In that case, we have a null value on our hands... if (Const.isEmpty(pol)) { return null; } else { // if the null_value is specified, we try to match with that. if (!Const.isEmpty(null_value)) { if (null_value.length()<=pol.length()) { // If the polled value is equal to the spaces right-padded null_value, we have a match if (pol.equalsIgnoreCase(Const.rightPad(new StringBuffer(null_value), pol.length()))) { return null; } } } else { // Verify if there are only spaces in the polled value... // We consider that empty as well... 
if (Const.onlySpaces(pol)) { return null; } } } // Trimming switch (trim_type) { case ValueMetaInterface.TRIM_TYPE_LEFT: { StringBuffer strpol = new StringBuffer(pol); while (strpol.length() > 0 && strpol.charAt(0) == ' ') strpol.deleteCharAt(0); pol=strpol.toString(); } break; case ValueMetaInterface.TRIM_TYPE_RIGHT: { StringBuffer strpol = new StringBuffer(pol); while (strpol.length() > 0 && strpol.charAt(strpol.length() - 1) == ' ') strpol.deleteCharAt(strpol.length() - 1); pol=strpol.toString(); } break; case ValueMetaInterface.TRIM_TYPE_BOTH: StringBuffer strpol = new StringBuffer(pol); { while (strpol.length() > 0 && strpol.charAt(0) == ' ') strpol.deleteCharAt(0); while (strpol.length() > 0 && strpol.charAt(strpol.length() - 1) == ' ') strpol.deleteCharAt(strpol.length() - 1); pol=strpol.toString(); } break; default: break; } // On with the regular program... // Simply call the ValueMeta routines to do the conversion // We need to do some effort here: copy all return convertData(convertMeta, pol); } /** * Calculate the hashcode of the specified data object * @param object the data value to calculate a hashcode for * @return the calculated hashcode * @throws KettleValueException */ public int hashCode(Object object) throws KettleValueException { int hash=0; if (isNull(object)) { switch(getType()) { case TYPE_BOOLEAN : hash^= 1; break; case TYPE_DATE : hash^= 2; break; case TYPE_NUMBER : hash^= 4; break; case TYPE_STRING : hash^= 8; break; case TYPE_INTEGER : hash^=16; break; case TYPE_BIGNUMBER : hash^=32; break; case TYPE_NONE : break; default: break; } } else { switch(getType()) { case TYPE_BOOLEAN : hash^=getBoolean(object).hashCode(); break; case TYPE_DATE : hash^=getDate(object).hashCode(); break; case TYPE_INTEGER : hash^=getInteger(object).hashCode(); break; case TYPE_NUMBER : hash^=getNumber(object).hashCode(); break; case TYPE_STRING : hash^=getString(object).hashCode(); break; case TYPE_BIGNUMBER : hash^=getBigNumber(object).hashCode(); break; case TYPE_NONE : break; default: break; } } return hash; } /** * Create an old-style value for backward compatibility reasons * @param data the data to store in the value * @return a newly created Value object * @throws KettleValueException case there is a data conversion problem */ public Value createOriginalValue(Object data) throws KettleValueException { Value value = new Value(name, type); value.setLength(length, precision); if (isNull(data)) { value.setNull(); } else { switch(value.getType()) { case TYPE_STRING : value.setValue( getString(data) ); break; case TYPE_NUMBER : value.setValue( getNumber(data).doubleValue() ); break; case TYPE_INTEGER : value.setValue( getInteger(data).longValue() ); break; case TYPE_DATE : value.setValue( getDate(data) ); break; case TYPE_BOOLEAN : value.setValue( getBoolean(data).booleanValue() ); break; case TYPE_BIGNUMBER : value.setValue( getBigNumber(data) ); break; case TYPE_BINARY : value.setValue( getBinary(data) ); break; default: throw new KettleValueException(toString()+" : We can't convert data type "+getTypeDesc()+" to an original (V2) Value"); } } return value; } /** * Extracts the primitive data from an old style Value object * @param value the old style Value object * @return the value's data, NOT the meta data. * @throws KettleValueException case there is a data conversion problem */ public Object getValueData(Value value) throws KettleValueException { if (value==null || value.isNull()) return null; // So far the old types and the new types map to the same thing. 
// For compatibility we just ask the old-style value to convert to the new one. // In the old transformation this would happen sooner or later anyway. // It doesn't throw exceptions or complain either (unfortunately). switch(getType()) { case ValueMetaInterface.TYPE_STRING : return value.getString(); case ValueMetaInterface.TYPE_NUMBER : return value.getNumber(); case ValueMetaInterface.TYPE_INTEGER : return value.getInteger(); case ValueMetaInterface.TYPE_DATE : return value.getDate(); case ValueMetaInterface.TYPE_BOOLEAN : return value.getBoolean(); case ValueMetaInterface.TYPE_BIGNUMBER : return value.getBigNumber(); case ValueMetaInterface.TYPE_BINARY : return value.getBytes(); default: throw new KettleValueException(toString()+" : We can't convert original data type "+value.getTypeDesc()+" to a primitive data type"); } } /** * @return the storageMetadata */ public ValueMetaInterface getStorageMetadata() { return storageMetadata; } /** * @param storageMetadata the storageMetadata to set */ public void setStorageMetadata(ValueMetaInterface storageMetadata) { this.storageMetadata = storageMetadata; compareStorageAndActualFormat(); } private void compareStorageAndActualFormat() { if (storageMetadata==null) { identicalFormat = true; } else { // If a trim type is set, we need to at least try to trim the strings. // In that case, we have to set the identical format off. if (trimType!=TRIM_TYPE_NONE) { identicalFormat = false; } else { // If there is a string encoding set and it's the same encoding in the binary string, then we don't have to convert // If there are no encodings set, then we're certain we don't have to convert as well. if (getStringEncoding()!=null && getStringEncoding().equals(storageMetadata.getStringEncoding()) || getStringEncoding()==null && storageMetadata.getStringEncoding()==null) { // However, perhaps the conversion mask changed since we read the binary string? // The output can be different from the input. If the mask is different, we need to do conversions. // Otherwise, we can just ignore it... if (isDate()) { if ( (getConversionMask()!=null && getConversionMask().equals(storageMetadata.getConversionMask())) || (getConversionMask()==null && storageMetadata.getConversionMask()==null) ) { identicalFormat = true; } else { identicalFormat = false; } } else if (isNumeric()) { // Check the lengths first if (getLength()!=storageMetadata.getLength()) identicalFormat=false; else if (getPrecision()!=storageMetadata.getPrecision()) identicalFormat=false; else // For the same reasons as above, if the conversion mask, the decimal or the grouping symbol changes // we need to convert from the binary strings to the target data type and then back to a string in the required format. 
if ( (getConversionMask()!=null && getConversionMask().equals(storageMetadata.getConversionMask()) || (getConversionMask()==null && storageMetadata.getConversionMask()==null)) ) { if ( (getGroupingSymbol()!=null && getGroupingSymbol().equals(storageMetadata.getGroupingSymbol())) || (getConversionMask()==null && storageMetadata.getConversionMask()==null) ) { if ( (getDecimalFormat()!=null && getDecimalFormat().equals(storageMetadata.getDecimalFormat())) || (getDecimalFormat()==null && storageMetadata.getDecimalFormat()==null) ) { identicalFormat = true; } else { identicalFormat = false; } } else { identicalFormat = false; } } else { identicalFormat = false; } } } } } } /** * @return the trimType */ public int getTrimType() { return trimType; } /** * @param trimType the trimType to set */ public void setTrimType(int trimType) { this.trimType = trimType; } public final static int getTrimTypeByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < trimTypeCode.length; i++) { if (trimTypeCode[i].equalsIgnoreCase(tt)) return i; } return 0; } public final static int getTrimTypeByDesc(String tt) { if (tt == null) return 0; for (int i = 0; i < trimTypeDesc.length; i++) { if (trimTypeDesc[i].equalsIgnoreCase(tt)) return i; } // If this fails, try to match using the code. return getTrimTypeByCode(tt); } public final static String getTrimTypeCode(int i) { if (i < 0 || i >= trimTypeCode.length) return trimTypeCode[0]; return trimTypeCode[i]; } public final static String getTrimTypeDesc(int i) { if (i < 0 || i >= trimTypeDesc.length) return trimTypeDesc[0]; return trimTypeDesc[i]; } /** * @return the conversionMetadata */ public ValueMetaInterface getConversionMetadata() { return conversionMetadata; } /** * @param conversionMetadata the conversionMetadata to set */ public void setConversionMetadata(ValueMetaInterface conversionMetadata) { this.conversionMetadata = conversionMetadata; } /** * @return true if the String encoding used (storage) is single byte encoded. */ public boolean isSingleByteEncoding() { return singleByteEncoding; } /** * @return the number of binary string to native data type conversions done with this object conversions */ public long getNumberOfBinaryStringConversions() { return numberOfBinaryStringConversions; } /** * @param numberOfBinaryStringConversions the number of binary string to native data type done with this object conversions to set */ public void setNumberOfBinaryStringConversions(long numberOfBinaryStringConversions) { this.numberOfBinaryStringConversions = numberOfBinaryStringConversions; } }
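/*
 * Illustrative usage sketch (not part of the original ValueMeta source): shows how the
 * compare() overloads above behave. The ValueMeta(String, int) constructor and the
 * setCaseInsensitive() setter are assumed from the wider class; only the compare() and
 * convertData() methods are visible in the excerpt above.
 */
class ValueMetaCompareSketch {
    static void demo() throws KettleValueException {
        ValueMeta nameMeta = new ValueMeta("name", ValueMetaInterface.TYPE_STRING); // assumed constructor
        nameMeta.setCaseInsensitive(true); // assumed setter; toggles the caseInsensitive flag used by compare()

        // Case-insensitive string comparison: trailing spaces are right-trimmed, case is ignored.
        int equal = nameMeta.compare("Apple ", "apple"); // expected: 0

        // Cross-type comparison: the first meta drives the conversion, so convertData()
        // turns the string "42" into a Long before the integer comparison runs.
        ValueMeta ageMeta = new ValueMeta("age", ValueMetaInterface.TYPE_INTEGER); // assumed constructor
        ValueMeta ageAsString = new ValueMeta("age", ValueMetaInterface.TYPE_STRING); // assumed constructor
        int smaller = ageMeta.compare(Long.valueOf(41L), ageAsString, "42"); // expected: -1
    }
}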
package org.apache.xerces.dom; import java.io.*; import java.util.Vector; import java.util.Enumeration; import org.w3c.dom.*; import org.apache.xerces.dom.events.MutationEventImpl; import org.w3c.dom.events.*; /** * AttributeMap inherits from NamedNodeMapImpl and extends it to deal with the * specifics of storing attributes. These are: * <ul> * <li>managing ownership of attribute nodes * <li>managing default attributes * <li>firing mutation events * </ul> * */ public class AttributeMap extends NamedNodeMapImpl { // Constructors /** Constructs a named node map. */ protected AttributeMap(ElementImpl ownerNode, NamedNodeMapImpl defaults) { super(ownerNode); if (defaults != null) { // initialize map with the defaults cloneContent(defaults); if (nodes != null) { hasDefaults(true); } } } /** * Adds an attribute using its nodeName attribute. * @see org.w3c.dom.NamedNodeMap#setNamedItem * @return If the new Node replaces an existing node the replaced Node is * returned, otherwise null is returned. * @param arg * An Attr node to store in this map. * @exception org.w3c.dom.DOMException The exception description. */ public Node setNamedItem(Node arg) throws DOMException { if (isReadOnly()) { throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR, "DOM001 Modification not allowed"); } if(arg.getOwnerDocument() != ownerNode.ownerDocument()) { throw new DOMException(DOMException.WRONG_DOCUMENT_ERR, "DOM005 Wrong document"); } NodeImpl argn = (NodeImpl)arg; if (argn.isOwned()) { throw new DOMException(DOMException.INUSE_ATTRIBUTE_ERR, "DOM009 Attribute already in use"); } // set owner argn.ownerNode = ownerNode; argn.isOwned(true); int i = findNamePoint(arg.getNodeName(),0); NodeImpl previous = null; if (i >= 0) { previous = (NodeImpl) nodes.elementAt(i); nodes.setElementAt(arg,i); previous.ownerNode = ownerNode.ownerDocument(); previous.isOwned(false); // make sure it won't be mistaken with defaults in case it's reused previous.isSpecified(true); } else { i = -1 - i; // Insert point (may be end of list) if (null == nodes) { nodes = new Vector(5, 10); } nodes.insertElementAt(arg, i); } if (NodeImpl.MUTATIONEVENTS && ownerNode.ownerDocument().mutationEvents) { // MUTATION POST-EVENTS: ownerNode.dispatchAggregateEvents( (AttrImpl)arg, previous==null ? null : previous.getNodeValue(), previous==null ? MutationEvent.ADDITION : MutationEvent.MODIFICATION ); } return previous; } // setNamedItem(Node):Node /** * Adds an attribute using its namespaceURI and localName. * @see org.w3c.dom.NamedNodeMap#setNamedItem * @return If the new Node replaces an existing node the replaced Node is * returned, otherwise null is returned. * @param arg A node to store in a named node map. 
*/ public Node setNamedItemNS(Node arg) throws DOMException { if (isReadOnly()) { throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR, "DOM001 Modification not allowed"); } if(arg.getOwnerDocument() != ownerNode.ownerDocument()) { throw new DOMException(DOMException.WRONG_DOCUMENT_ERR, "DOM005 Wrong document"); } NodeImpl argn = (NodeImpl)arg; if (argn.isOwned()) { throw new DOMException(DOMException.INUSE_ATTRIBUTE_ERR, "DOM009 Attribute already in use"); } // set owner argn.ownerNode = ownerNode; argn.isOwned(true); int i = findNamePoint(argn.getNamespaceURI(), argn.getLocalName()); NodeImpl previous = null; if (i >= 0) { previous = (NodeImpl) nodes.elementAt(i); nodes.setElementAt(arg,i); previous.ownerNode = ownerNode.ownerDocument(); previous.isOwned(false); // make sure it won't be mistaken with defaults in case it's reused previous.isSpecified(true); } else { // If we can't find by namespaceURI, localName, then we find by // nodeName so we know where to insert. i = findNamePoint(arg.getNodeName(),0); if (i >=0) { previous = (NodeImpl) nodes.elementAt(i); nodes.insertElementAt(arg,i); } else { i = -1 - i; // Insert point (may be end of list) if (null == nodes) { nodes = new Vector(5, 10); } nodes.insertElementAt(arg, i); } } // changed(true); // Only NamedNodeMaps containing attributes (those which are // bound to an element) need report MutationEvents if (NodeImpl.MUTATIONEVENTS && ownerNode.ownerDocument().mutationEvents) { // MUTATION POST-EVENTS: ownerNode.dispatchAggregateEvents( (AttrImpl)arg, previous==null ? null : previous.getNodeValue(), previous==null ? MutationEvent.ADDITION : MutationEvent.MODIFICATION ); } return previous; } // setNamedItem(Node):Node /** * Removes an attribute specified by name. * @param name * The name of a node to remove. If the * removed attribute is known to have a default value, an * attribute immediately appears containing the default value * as well as the corresponding namespace URI, local name, * and prefix when applicable. * @return The node removed from the map if a node with such a name exists. * @throws NOT_FOUND_ERR: Raised if there is no node named * name in the map. */ public Node removeNamedItem(String name) throws DOMException { return internalRemoveNamedItem(name, true); } /** * Same as removeNamedItem except that it simply returns null if the * specified name is not found. */ Node safeRemoveNamedItem(String name) { return internalRemoveNamedItem(name, false); } /** * Internal removeNamedItem method allowing to specify whether an exception * must be thrown if the specified name is not found. 
*/ final protected Node internalRemoveNamedItem(String name, boolean raiseEx){ if (isReadOnly()) { throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR, "DOM001 Modification not allowed"); } int i = findNamePoint(name,0); if (i < 0) { if (raiseEx) { throw new DOMException(DOMException.NOT_FOUND_ERR, "DOM008 Not found"); } else { return null; } } LCount lc=null; String oldvalue=""; AttrImpl enclosingAttribute=null; if (NodeImpl.MUTATIONEVENTS && ownerNode.ownerDocument().mutationEvents) { // MUTATION PREPROCESSING AND PRE-EVENTS: lc=LCount.lookup(MutationEventImpl.DOM_ATTR_MODIFIED); if(lc.captures+lc.bubbles+lc.defaults>0) { enclosingAttribute=(AttrImpl)(nodes.elementAt(i)); oldvalue=enclosingAttribute.getNodeValue(); } } // End mutation preprocessing NodeImpl n = (NodeImpl)nodes.elementAt(i); // If there's a default, add it instead if (hasDefaults()) { NamedNodeMapImpl defaults = ((ElementImpl) ownerNode).getDefaultAttributes(); Node d; if (defaults != null && (d = defaults.getNamedItem(name)) != null && findNamePoint(name, i+1) < 0) { NodeImpl clone = (NodeImpl)d.cloneNode(true); clone.ownerNode = ownerNode; clone.isOwned(true); clone.isSpecified(false); nodes.setElementAt(clone, i); } else { nodes.removeElementAt(i); } } else { nodes.removeElementAt(i); } // changed(true); // remove owning element n.ownerNode = ownerNode.ownerDocument(); n.isOwned(false); // make sure it won't be mistaken with defaults in case it's reused n.isSpecified(true); // We can't use the standard dispatchAggregate, since it assumes // that the Attr is still attached to an owner. This code is // similar but dispatches to the previous owner, "element". if(NodeImpl.MUTATIONEVENTS && ownerNode.ownerDocument().mutationEvents) { // If we have to send DOMAttrModified (determined earlier), // do so. if(lc.captures+lc.bubbles+lc.defaults>0) { MutationEventImpl me= new MutationEventImpl(); me.initMutationEvent(MutationEventImpl.DOM_ATTR_MODIFIED, true, false, null, n.getNodeValue(), null, name, MutationEvent.REMOVAL); ownerNode.dispatchEvent(me); } // We can hand off to process DOMSubtreeModified, though. // Note that only the Element needs to be informed; the // Attr's subtree has not been changed by this operation. ownerNode.dispatchAggregateEvents(null,null,(short)0); } return n; } // removeNamedItem(String):Node /** * Introduced in DOM Level 2. <p> * Removes an attribute specified by local name and namespace URI. * @param namespaceURI * The namespace URI of the node to remove. * When it is null or an empty string, this * method behaves like removeNamedItem. * @param The local name of the node to remove. If the * removed attribute is known to have a default * value, an attribute immediately appears * containing the default value. * @return Node The node removed from the map if a node with such * a local name and namespace URI exists. * @throws NOT_FOUND_ERR: Raised if there is no node named * name in the map. */ public Node removeNamedItemNS(String namespaceURI, String name) throws DOMException { return internalRemoveNamedItemNS(namespaceURI, name, true); } /** * Same as removeNamedItem except that it simply returns null if the * specified local name and namespace URI is not found. */ Node safeRemoveNamedItemNS(String namespaceURI, String name) { return internalRemoveNamedItemNS(namespaceURI, name, false); } /** * Internal removeNamedItemNS method allowing to specify whether an * exception must be thrown if the specified local name and namespace URI * is not found. 
*/ final protected Node internalRemoveNamedItemNS(String namespaceURI, String name, boolean raiseEx) { if (isReadOnly()) { throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR, "DOM001 Modification not allowed"); } int i = findNamePoint(namespaceURI, name); if (i < 0) { if (raiseEx) { throw new DOMException(DOMException.NOT_FOUND_ERR, "DOM008 Not found"); } else { return null; } } LCount lc=null; String oldvalue=""; AttrImpl enclosingAttribute=null; if (NodeImpl.MUTATIONEVENTS && ownerNode.ownerDocument().mutationEvents) { // MUTATION PREPROCESSING AND PRE-EVENTS: lc=LCount.lookup(MutationEventImpl.DOM_ATTR_MODIFIED); if(lc.captures+lc.bubbles+lc.defaults>0) { enclosingAttribute=(AttrImpl)(nodes.elementAt(i)); oldvalue=enclosingAttribute.getNodeValue(); } } // End mutation preprocessing NodeImpl n = (NodeImpl)nodes.elementAt(i); // If there's a default, add it instead String nodeName = n.getNodeName(); if (hasDefaults()) { NamedNodeMapImpl defaults = ((ElementImpl) ownerNode).getDefaultAttributes(); Node d; if (defaults != null && (d = defaults.getNamedItem(nodeName)) != null) { int j = findNamePoint(nodeName,0); if (j>=0 && findNamePoint(nodeName, j+1) < 0) { NodeImpl clone = (NodeImpl)d.cloneNode(true); clone.ownerNode = ownerNode; clone.isOwned(true); clone.isSpecified(false); nodes.setElementAt(clone, i); } else { nodes.removeElementAt(i); } } else { nodes.removeElementAt(i); } } else { nodes.removeElementAt(i); } // changed(true); // Need to remove references to an Attr's owner before the // MutationEvents fire. n.ownerNode = ownerNode.ownerDocument(); n.isOwned(false); // make sure it won't be mistaken with defaults in case it's reused n.isSpecified(true); // We can't use the standard dispatchAggregate, since it assumes // that the Attr is still attached to an owner. This code is // similar but dispatches to the previous owner, "element". if(NodeImpl.MUTATIONEVENTS && ownerNode.ownerDocument().mutationEvents) { // If we have to send DOMAttrModified (determined earlier), // do so. if(lc.captures+lc.bubbles+lc.defaults>0) { MutationEventImpl me= new MutationEventImpl(); me.initMutationEvent(MutationEventImpl.DOM_ATTR_MODIFIED, true, false, null, n.getNodeValue(), null, name, MutationEvent.REMOVAL); ownerNode.dispatchEvent(me); } // We can hand off to process DOMSubtreeModified, though. // Note that only the Element needs to be informed; the // Attr's subtree has not been changed by this operation. ownerNode.dispatchAggregateEvents(null,null,(short)0); } return n; } // removeNamedItem(String):Node // Public methods /** * Cloning a NamedNodeMap is a DEEP OPERATION; it always clones * all the nodes contained in the map. 
*/ public NamedNodeMapImpl cloneMap(NodeImpl ownerNode) { AttributeMap newmap = new AttributeMap((ElementImpl) ownerNode, null); newmap.hasDefaults(hasDefaults()); newmap.cloneContent(this); return newmap; } // cloneMap():AttributeMap /** * Override parent's method to set the ownerNode correctly */ protected void cloneContent(NamedNodeMapImpl srcmap) { if (srcmap.nodes != null) { nodes = new Vector(srcmap.nodes.size()); for (int i = 0; i < srcmap.nodes.size(); ++i) { NodeImpl n = (NodeImpl) srcmap.nodes.elementAt(i); NodeImpl clone = (NodeImpl) n.cloneNode(true); clone.isSpecified(n.isSpecified()); nodes.insertElementAt(clone, i); clone.ownerNode = ownerNode; clone.isOwned(true); } } } // cloneContent():AttributeMap // Protected methods /** * Subroutine: If this NamedNodeMap is backed by a "defaults" map (eg, * if it's being used for Attributes of an XML file validated against * a DTD), we need to deal with the risk that those defaults might * have changed. Entries may have been added, changed, or removed, and * if so we need to update our version of that information * <P> * Luckily, this currently applies _only_ to Attributes, which have a * "specified" flag that allows us to distinguish which we set manually * versus which were defaults... assuming that the defaults list is being * maintained properly, of course. * <P> * Also luckily, The NameNodeMaps are maintained as * sorted lists. This should keep the cost of convolving the two lists * managable... not wonderful, but at least more like 2N than N**2.. * <P> * Finally, to avoid doing the convolution except when there are actually * changes to be absorbed, I've made the Map aware of whether or not * its defaults Map has changed. This is not 110% reliable, but it should * work under normal circumstances, especially since the DTD is usually * relatively static information. * <P> * Note: This is NON-DOM implementation, though used to support behavior * that the DOM requires. */ } // class AttributeMap
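/*
 * Illustrative sketch (not part of the Xerces source): exercises the AttributeMap above
 * indirectly through the public DOM API. DocumentImpl's no-argument constructor belongs to
 * this package but is not shown in this excerpt; the element and attribute names are arbitrary.
 */
class AttributeMapUsageSketch {
    static void demo() {
        Document doc = new DocumentImpl();
        Element item = doc.createElement("item");

        Attr first = doc.createAttribute("id");
        first.setValue("1");
        item.setAttributeNode(first); // delegates to setNamedItem(): new entry, returns null

        Attr second = doc.createAttribute("id");
        second.setValue("2");
        Attr replaced = item.setAttributeNode(second); // setNamedItem() replaces and returns `first`

        try {
            // `second` is now owned by `item`, so adding it to another element must fail.
            doc.createElement("other").setAttributeNode(second);
        } catch (DOMException expected) {
            // DOMException.INUSE_ATTRIBUTE_ERR, raised by setNamedItem() above
        }

        // Removal routes through the internalRemoveNamedItem() logic above; a DTD default,
        // if one existed, would be re-inserted in its place.
        item.removeAttribute("id");
    }
}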
package org.kairosdb.util;

import java.util.concurrent.Callable;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class RetryCallable implements Callable<Integer> {
    public static final Logger logger = LoggerFactory.getLogger(RetryCallable.class);

    private int m_retries = -1;

    @Override
    public final Integer call() throws Exception {
        m_retries++;
        if (m_retries > 0)
            logger.info("Retrying batch");

        retryCall();

        return m_retries;
    }

    public abstract void retryCall() throws Exception;
}
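/*
 * Illustrative usage sketch (not part of the original file): RetryCallable keeps its own
 * retry counter, so the caller simply re-invokes call() until it stops throwing and gets
 * back the number of retries that were needed. The batch-writing Runnable is hypothetical.
 */
class RetryCallableUsageSketch {
    static int writeWithRetries(final Runnable hypotheticalBatchWrite, int maxAttempts) throws Exception {
        RetryCallable retryable = new RetryCallable() {
            @Override
            public void retryCall() throws Exception {
                hypotheticalBatchWrite.run(); // may throw; the loop below decides whether to retry
            }
        };

        Exception lastFailure = null;
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            try {
                // Logs "Retrying batch" on every attempt after the first, then returns the retry count.
                return retryable.call();
            } catch (Exception e) {
                lastFailure = e;
            }
        }
        throw lastFailure != null ? lastFailure : new IllegalArgumentException("maxAttempts must be >= 1");
    }
}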
package org.lantern.proxy; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpRequest; import java.net.InetSocketAddress; import javax.net.ssl.SSLSession; import org.lantern.ClientStats; import org.lantern.LanternUtils; import org.lantern.PeerFactory; import org.lantern.event.Events; import org.lantern.event.ModeChangedEvent; import org.lantern.proxy.pt.PluggableTransport; import org.lantern.proxy.pt.PluggableTransports; import org.lantern.state.Mode; import org.lantern.state.Model; import org.lantern.state.Peer; import org.lantern.state.Settings; import org.littleshoot.proxy.ActivityTrackerAdapter; import org.littleshoot.proxy.FlowContext; import org.littleshoot.proxy.FullFlowContext; import org.littleshoot.proxy.HttpFilters; import org.littleshoot.proxy.HttpFiltersSourceAdapter; import org.littleshoot.proxy.SslEngineSource; import org.littleshoot.proxy.TransportProtocol; import org.littleshoot.proxy.impl.DefaultHttpProxyServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.Subscribe; import com.google.inject.Inject; import com.google.inject.Singleton; /** * <p> * HTTP proxy server for remote requests to Lantern (i.e. in Give Mode). * </p> * * <p> * GiveModeProxy starts and stops itself based on {@link ModeChangedEvent}s so * that it's only running when Lantern is in Give mode. * </p> */ @Singleton public class GiveModeProxy extends AbstractHttpProxyServerAdapter { private final Logger log = LoggerFactory.getLogger(getClass()); private Model model; private volatile boolean running = false; private PluggableTransport pluggableTransport; @Inject public GiveModeProxy( final ClientStats stats, final Model model, final SslEngineSource sslEngineSource, final PeerFactory peerFactory) { final Settings settings = model.getSettings(); int serverPort = settings.getServerPort(); boolean allowLocalOnly = false; if (settings.getProxyPtType() != null) { // When using a pluggable transport, the transport will use the // configured port and the server will use some random free port // that only allows local connections serverPort = LanternUtils.findFreePort(); allowLocalOnly = true; pluggableTransport = PluggableTransports.newTransport( settings.getProxyPtType(), settings.getProxyPtProps()); log.info("GiveModeProxy will use pluggable transport of type: " + pluggableTransport.getClass().getName()); } setBootstrap(DefaultHttpProxyServer .bootstrap() .withName("GiveModeProxy") .withPort(serverPort) .withTransportProtocol(settings.getProxyProtocol()) .withAllowLocalOnly(allowLocalOnly) .withListenOnAllAddresses(false) .withSslEngineSource(sslEngineSource) .withAuthenticateSslClients(!LanternUtils.isFallbackProxy()) // Use a filter to deny requests to non-public ips .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, model.getReportIp(), settings.getProxyPort(), settings.getProxyProtocol(), settings.getProxyAuthToken()); } }) // Keep stats up to date .plusActivityTracker(new ActivityTrackerAdapter() { @Override public void bytesReceivedFromClient( FlowContext flowContext, int numberOfBytes) { stats.addDownBytesFromPeers(numberOfBytes, flowContext.getClientAddress().getAddress()); Peer peer = peerFor(flowContext); if (peer != null) { peer.addBytesDn(numberOfBytes); } } @Override public void bytesSentToServer(FullFlowContext flowContext, int numberOfBytes) { 
stats.addUpBytesForPeers(numberOfBytes); } @Override public void bytesReceivedFromServer( FullFlowContext flowContext, int numberOfBytes) { stats.addDownBytesForPeers(numberOfBytes); } @Override public void bytesSentToClient(FlowContext flowContext, int numberOfBytes) { stats.addUpBytesToPeers(numberOfBytes, flowContext.getClientAddress().getAddress()); Peer peer = peerFor(flowContext); if (peer != null) { peer.addBytesUp(numberOfBytes); } } @Override public void clientSSLHandshakeSucceeded( InetSocketAddress clientAddress, SSLSession sslSession) { Peer peer = peerFor(sslSession); if (peer != null) { peer.connected(); } stats.addProxiedClientAddress(clientAddress .getAddress()); } @Override public void clientDisconnected( InetSocketAddress clientAddress, SSLSession sslSession) { Peer peer = peerFor(sslSession); if (peer != null) { peer.disconnected(); } } private Peer peerFor(FlowContext flowContext) { return peerFor(flowContext.getClientSslSession()); } private Peer peerFor(SSLSession sslSession) { return sslSession != null ? peerFactory .peerForSession(sslSession) : null; } })); this.model = model; Events.register(this); log.info( "Creating give mode proxy on port {}, running as fallback: {}", settings.getServerPort(), LanternUtils.isFallbackProxy()); } @Override public synchronized void start() { super.start(); if (TransportProtocol.TCP == model.getSettings().getProxyProtocol()) { InetSocketAddress original = server.getListenAddress(); InetSocketAddress next = new InetSocketAddress( original.getAddress(), original.getPort() - 443); server.clone() .withAddress(next) .withSslEngineSource(null) .start(); log.info("Added additional unencrypted server for TCP on port {}", next.getPort()); } // Start the pluggable transport if necessary if (pluggableTransport != null) { int port = model.getSettings().getServerPort(); InetSocketAddress giveModeAddress = server.getListenAddress(); pluggableTransport.startServer(port, giveModeAddress); } running = true; log.info("Started GiveModeProxy"); } @Override public synchronized void stop() { super.stop(); running = false; // Stop the pluggable transport if necessary if (pluggableTransport != null) { pluggableTransport.stopServer(); } log.info("Stopped GiveModeProxy"); } @Subscribe public void modeChanged(ModeChangedEvent event) { log.debug("Got mode change"); if (Mode.give == event.getNewMode()) { if (!running) { start(); } } else { if (running) { stop(); } } } }
package org.apache.helix.actor.api;

import io.netty.buffer.ByteBuf;

/**
 * Encodes and decodes messages of type T to and from {@link io.netty.buffer.ByteBuf}s
 */
public interface HelixActorMessageCodec<T> {

    /**
     * Encodes a typed message into a {@link io.netty.buffer.ByteBuf}.
     *
     * <p>
     * {@link io.netty.buffer.ByteBuf#release()} will be called once on the
     * return value, so if you want to ensure that it doesn't get
     * reclaimed, call {@link io.netty.buffer.ByteBuf#retain()} before
     * returning it.
     * </p>
     *
     * <p>
     * N.b. This does not necessarily need to generate a new ByteBuf.
     * Existing ByteBufs from a Netty pipeline may be used, for example.
     * </p>
     *
     * @see io.netty.buffer.ByteBuf#slice()
     * @see io.netty.buffer.CompositeByteBuf
     */
    ByteBuf encode(T message);

    /**
     * Decodes a typed message from a {@link io.netty.buffer.ByteBuf}.
     *
     * <p>
     * The reader index will be positioned at the beginning of the message,
     * and the next {@link io.netty.buffer.ByteBuf#readableBytes()} are the
     * message.
     * </p>
     *
     * <p>
     * For example, if T is String, and you want to generate a new object:
     * <pre>
     * byte[] bytes = new byte[message.readableBytes()];
     * message.readBytes(bytes);
     * return new String(bytes);
     * </pre>
     * </p>
     */
    T decode(ByteBuf message);
}
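/*
 * Illustrative sketch (not part of Helix): a minimal String implementation of the codec
 * contract documented above, using UTF-8. Unpooled.wrappedBuffer() and the ByteBuf read
 * methods are standard Netty API; the class name is hypothetical.
 */
class StringMessageCodecSketch implements HelixActorMessageCodec<String> {

    @Override
    public ByteBuf encode(String message) {
        // Wrap the encoded bytes in a ByteBuf; the caller releases it once, per the contract above.
        return io.netty.buffer.Unpooled.wrappedBuffer(
                message.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }

    @Override
    public String decode(ByteBuf message) {
        // The reader index sits at the start of the message; readableBytes() is its length.
        byte[] bytes = new byte[message.readableBytes()];
        message.readBytes(bytes);
        return new String(bytes, java.nio.charset.StandardCharsets.UTF_8);
    }
}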
package org.beanmaker.util; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.CellStyle; import org.apache.poi.ss.usermodel.Font; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.util.CellRangeAddress; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.dbbeans.util.Money; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; import java.util.Map; public abstract class BaseExcelExport extends TabularView { protected String sheetName; protected CellStyle superHeaderFormat; protected CellStyle headerFormat; protected CellStyle dateFormat; protected CellStyle timeFormat; protected CellStyle timestampFormat; protected CellStyle integerFormat; protected CellStyle decimalFormat; protected boolean hasSuperTitleRow = false; public BaseExcelExport(String resourceBundleName, String sheetName) { super(resourceBundleName); this.sheetName = sheetName; } protected void setLanguage(DbBeanLanguage dbBeanLanguage, final Map<String, String> labels) { this.dbBeanLanguage = dbBeanLanguage; yesName = labels.get("yes"); noName = labels.get("no"); setLocale(dbBeanLanguage.getLocale()); } public Workbook getExcelWorkbook() { return getExcelWorkbook(sheetName); } public Workbook getExcelWorkbook(final String sheetName) { final Workbook excelWorkbook = new XSSFWorkbook(); initDateFormat(excelWorkbook); initTimeFormat(excelWorkbook); initTimestampFormat(excelWorkbook); initSuperHeaderFormat(excelWorkbook); initHeaderFormat(excelWorkbook); initIntegerFormat(excelWorkbook); initDecimalFormat(excelWorkbook); final Sheet sheet = excelWorkbook.createSheet(sheetName); addSuperTitleRowTo(sheet); addTitleRowTo(sheet); addDataRowsTo(sheet); return excelWorkbook; } public String getFilename() { return sheetName + ".xlsx"; } protected void initSuperHeaderFormat(final Workbook excelWorkbook) { final Font font = excelWorkbook.createFont(); font.setBold(true); font.setFontHeightInPoints((short) 14); superHeaderFormat = excelWorkbook.createCellStyle(); superHeaderFormat.setFont(font); } protected void initHeaderFormat(final Workbook excelWorkbook) { final Font font = excelWorkbook.createFont(); font.setBold(true); headerFormat = excelWorkbook.createCellStyle(); headerFormat.setFont(font); } protected void initDateFormat(final Workbook excelWorkbook) { dateFormat = excelWorkbook.createCellStyle(); dateFormat.setDataFormat(excelWorkbook.getCreationHelper().createDataFormat().getFormat("m/d/yy")); } protected void initTimeFormat(final Workbook excelWorkbook) { timeFormat = excelWorkbook.createCellStyle(); timeFormat.setDataFormat(excelWorkbook.getCreationHelper().createDataFormat().getFormat("h:mm:ss")); } protected void initTimestampFormat(final Workbook excelWorkbook) { timestampFormat = excelWorkbook.createCellStyle(); timestampFormat.setDataFormat(excelWorkbook.getCreationHelper().createDataFormat().getFormat("m/d/yy h:mm")); } protected void initIntegerFormat(final Workbook excelWorkbook) { integerFormat = excelWorkbook.createCellStyle(); integerFormat.setDataFormat(excelWorkbook.getCreationHelper().createDataFormat().getFormat("0")); } protected void initDecimalFormat(final Workbook excelWorkbook) { decimalFormat = excelWorkbook.createCellStyle(); decimalFormat.setDataFormat(excelWorkbook.getCreationHelper().createDataFormat().getFormat("0.00")); } protected String getYesOrNoRepresentation(final boolean value) { if (value) return yesName; return noName; } protected void 
addSuperTitleRowTo(final Sheet sheet) { } protected int addSuperTitleCell( final Sheet sheet, final Row superTitleRow, final String propertyName, final int startingCellNumber, final int cellCount) { if (propertyName == null) throw new NullPointerException("propertyName cannot be null"); if (startingCellNumber < 0) throw new IllegalArgumentException("starting cell must be 0 or positive"); if (cellCount <= 0) throw new IllegalArgumentException("cell count must be 1 or more"); superTitleRow.createCell(startingCellNumber).setCellValue(resourceBundle.getString(propertyName)); superTitleRow.getCell(startingCellNumber).setCellStyle(superHeaderFormat); int nextCell = startingCellNumber + cellCount; sheet.addMergedRegion(new CellRangeAddress(0, 0, startingCellNumber, nextCell - 1)); return nextCell; } protected abstract void addTitleRowTo(final Sheet sheet); protected void addAdHocTitleCell(final Row titleRow, final String text, final int cellNumber) { titleRow.createCell(cellNumber).setCellValue(text); titleRow.getCell(cellNumber).setCellStyle(headerFormat); } protected void addTitleCell(final Row titleRow, final String resourceLabel, final int cellNumber) { addAdHocTitleCell(titleRow, resourceBundle.getString(resourceLabel), cellNumber); } protected void addIdTitleCell(final Row titleRow, final int cellNumber) { addTitleCell(titleRow, "id", cellNumber); } protected abstract void addDataRowsTo(final Sheet sheet); protected void addIdCell(final Row row, final DbBeanInterface bean, final int cellNumber) { row.createCell(cellNumber).setCellValue(bean.getId()); row.getCell(cellNumber).setCellStyle(integerFormat); } protected void addDataCell(final Row row, final int cellNumber, final String data) { row.createCell(cellNumber).setCellValue(data); } protected void addDataCell(final Row row, final int cellNumber, final boolean data) { row.createCell(cellNumber).setCellValue(getYesOrNoRepresentation(data)); } protected void addDataCell(final Row row, final int cellNumber, final int data) { row.createCell(cellNumber).setCellValue(data); row.getCell(cellNumber).setCellStyle(integerFormat); } protected void addDataCell(final Row row, final int cellNumber, final long data) { row.createCell(cellNumber).setCellValue(data); row.getCell(cellNumber).setCellStyle(integerFormat); } protected void addDataCell(final Row row, final int cellNumber, final Date data) { row.createCell(cellNumber).setCellValue(data); row.getCell(cellNumber).setCellStyle(dateFormat); } protected void addDataCell(final Row row, final int cellNumber, final Time data) { row.createCell(cellNumber).setCellValue(data); row.getCell(cellNumber).setCellStyle(timeFormat); } protected void addDataCell(final Row row, final int cellNumber, final Timestamp data) { row.createCell(cellNumber).setCellValue(data); row.getCell(cellNumber).setCellStyle(timestampFormat); } protected void addDataCell(final Row row, final int cellNumber, final Money data) { row.createCell(cellNumber).setCellValue(data.getDoubleVal()); row.getCell(cellNumber).setCellStyle(decimalFormat); } protected void addDataCell(final Row row, final int cellNumber, final double data) { row.createCell(cellNumber).setCellValue(data); row.getCell(cellNumber).setCellStyle(decimalFormat); } protected void autosizeColumns(final Sheet sheet, final int columns) { for (int i = 0; i < columns; ++i) sheet.autoSizeColumn(i); } }
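/*
 * Illustrative sketch (not part of the original file): a minimal concrete export built on
 * the helper methods above. The "name"/"city" resource labels, the bundle name and the
 * pre-fetched String[] rows are hypothetical, and it is assumed that TabularView (not shown)
 * imposes no further abstract methods.
 */
class SimplePersonExcelExportSketch extends BaseExcelExport {

    private final java.util.List<String[]> people; // hypothetical rows of {name, city}

    SimplePersonExcelExportSketch(final java.util.List<String[]> people) {
        super("PersonLabels", "People"); // hypothetical resource bundle and sheet name
        this.people = people;
    }

    @Override
    protected void addTitleRowTo(final Sheet sheet) {
        final Row titleRow = sheet.createRow(0);
        addTitleCell(titleRow, "name", 0);
        addTitleCell(titleRow, "city", 1);
    }

    @Override
    protected void addDataRowsTo(final Sheet sheet) {
        int rowIndex = 1;
        for (String[] person : people) {
            final Row row = sheet.createRow(rowIndex++);
            addDataCell(row, 0, person[0]);
            addDataCell(row, 1, person[1]);
        }
        autosizeColumns(sheet, 2);
    }
}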
package org.broad.igv.ga4gh; import com.google.gson.*; import org.broad.igv.PreferenceManager; import org.broad.igv.sam.Alignment; import org.broad.igv.ui.IGV; import org.broad.igv.util.Pair; import java.awt.*; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.*; import java.util.List; import java.util.zip.GZIPInputStream; public class Ga4ghAPIHelper { public static final String RESOURCE_TYPE = "ga4gh"; public static final Ga4ghProvider[] providers = { new Ga4ghProvider( "Google", "https: "AIzaSyC-dujgw4P1QvNd8i_c-I-S_P1uxVZzn0w", Arrays.asList( new Ga4ghDataset("10473108253681171589", "1000 Genomes", "hg19"), new Ga4ghDataset("383928317087", "PGP", "hg19"), new Ga4ghDataset("461916304629", "Simons Foundation", "hg19") )), new Ga4ghProvider("NCBI", "http://trace.ncbi.nlm.nih.gov/Traces/gg", null, Arrays.asList( new Ga4ghDataset("SRP034507", "SRP034507", "M74568"), new Ga4ghDataset("SRP029392", "SRP029392", "NC_004917") ))}; final static Map<String, List<Ga4ghReadset>> readsetCache = new HashMap<String, List<Ga4ghReadset>>(); public static List<Ga4ghReadset> readsetSearch(Ga4ghProvider provider, Ga4ghDataset dataset, int maxResults) throws IOException { String datasetId = dataset.getId(); List<Ga4ghReadset> readsets = readsetCache.get(datasetId); if (readsets == null) { readsets = new ArrayList(); String genomeId = genomeIdMap.get(provider.getName() + " " + datasetId); // Hack until meta data on readsets is available // Loop through pages int maxPages = 100; JsonPrimitive pageToken = null; while (maxPages String contentToPost = "{" + "\"datasetIds\": [\"" + datasetId + "\"]" + (pageToken == null ? "" : ", \"pageToken\": " + pageToken) + ", \"maxResults\":" + maxResults + "}"; String result = doPost(provider, "/readsets/search", contentToPost, null); //"fields=readsets(id,name, fileData),nextPageToken"); JsonParser parser = new JsonParser(); JsonObject obj = parser.parse(result).getAsJsonObject(); Iterator<JsonElement> iter = obj.getAsJsonArray("readsets").iterator(); while (iter.hasNext()) { JsonElement next = iter.next(); JsonObject jobj = next.getAsJsonObject(); String id = jobj.get("id").getAsString(); String name = jobj.get("name").getAsString(); readsets.add(new Ga4ghReadset(id, name, genomeId)); } if(readsets.size() >= maxResults) break; pageToken = obj.getAsJsonPrimitive("nextPageToken"); if (pageToken == null) break; } Collections.sort(readsets, new Comparator<Ga4ghReadset>() { @Override public int compare(Ga4ghReadset o1, Ga4ghReadset o2) { return o1.getName().compareTo(o2.getName()); } }); readsetCache.put(datasetId, readsets); } return readsets; } public static List<Alignment> reads(Ga4ghProvider provider, String readsetId, String chr, int start, int end) throws IOException { List<Alignment> alignments = new ArrayList<Alignment>(10000); int maxPages = 10000; JsonPrimitive pageToken = null; StringBuffer result = new StringBuffer(); while (maxPages String contentToPost = "{" + "\"readsetIds\": [\"" + readsetId + "\"]" + ", \"sequenceName\": \"" + chr + "\"" + ", \"sequenceStart\": \"" + start + "\"" + ", \"sequenceEnd\": \"" + end + "\"" + ", \"maxResults\": \"10000\"" + (pageToken == null ? 
"" : ", \"pageToken\": " + pageToken) + "}"; String readString = doPost(provider, "/reads/search", contentToPost, ""); JsonParser parser = new JsonParser(); JsonObject obj = parser.parse(readString).getAsJsonObject(); JsonArray reads = obj.getAsJsonArray("reads"); Iterator<JsonElement> iter = reads.iterator(); while (iter.hasNext()) { JsonElement next = iter.next(); Ga4ghAlignment alignment = new Ga4ghAlignment(next.getAsJsonObject()); alignments.add(alignment); } System.out.println("# reads = " + reads.size()); pageToken = obj.getAsJsonPrimitive("nextPageToken"); if (pageToken == null) break; } System.out.println("# pages= " + (10000 - maxPages)); return alignments; } private static String doPost(Ga4ghProvider provider, String command, String content, String fields) throws IOException { String authKey = provider.getAuthKey(); String baseURL = provider.getBaseURL(); String fullUrl = baseURL + command; if (authKey != null) fullUrl += "?key=" + authKey; if (fields != null) { fullUrl += (authKey == null ? "?" : "&") + fields; } URL url = new URL(fullUrl); byte[] bytes = content.getBytes(); // Create a URLConnection HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setUseCaches(false); connection.setDoInput(true); connection.setDoOutput(true); connection.setRequestMethod("POST"); //connection.setRequestProperty("Content-Length", "" + bytes.length); connection.setRequestProperty("Content-Type", "application/json"); connection.setRequestProperty("Cache-Control", "no-cache"); connection.setRequestProperty("Accept-Encoding", "gzip"); connection.setRequestProperty("User-Agent", "IGV (gzip)"); // Post content java.io.OutputStream stream = connection.getOutputStream(); stream.write(bytes); stream.close(); // Read the response java.io.BufferedReader br = new java.io.BufferedReader(new InputStreamReader(new GZIPInputStream(connection.getInputStream()))); StringBuffer sb = new StringBuffer(); String str = br.readLine(); while (str != null) { sb.append(str); str = br.readLine(); } br.close(); return sb.toString(); } static Map<String, String> genomeIdMap = new HashMap<String, String>(); // A hack until readset meta data is available static { genomeIdMap = new HashMap<String, String>(); genomeIdMap.put("Google 10473108253681171589", "hg19"); genomeIdMap.put("Google 383928317087", "hg19"); genomeIdMap.put("Google 461916304629", "hg19"); genomeIdMap.put("Google 337315832689", "hg19"); genomeIdMap.put("NCBI SRP034507", "M74568"); genomeIdMap.put("NCBI SRP029392", "NC_004917"); } }
package org.lightmare.deploy; import java.io.File; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URL; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.Future; import javax.annotation.Resource; import javax.ejb.EJB; import javax.ejb.Local; import javax.ejb.Remote; import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; import javax.ejb.TransactionManagement; import javax.ejb.TransactionManagementType; import javax.interceptor.AroundInvoke; import javax.interceptor.Interceptors; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceUnit; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.ConnectionData; import org.lightmare.cache.ConnectionSemaphore; import org.lightmare.cache.DeployData; import org.lightmare.cache.InjectionData; import org.lightmare.cache.InterceptorData; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.MetaData; import org.lightmare.config.Configuration; import org.lightmare.ejb.exceptions.BeanInUseException; import org.lightmare.jpa.datasource.InitMessages; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.libraries.LibraryLoader; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.NamingUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.beans.BeanUtils; import org.lightmare.utils.fs.FileUtils; import org.lightmare.utils.fs.WatchUtils; import org.lightmare.utils.reflect.MetaUtils; import org.lightmare.utils.rest.RestCheck; /** * Class for running in distinct thread to initialize * {@link javax.sql.DataSource}s load libraries and {@link javax.ejb.Stateless} * session beans and cache them and clean resources after deployments * * @author levan * */ public class BeanLoader { private static final Logger LOG = Logger.getLogger(BeanLoader.class); /** * PrivilegedAction implementation to set * {@link Executors#privilegedCallableUsingCurrentClassLoader()} passed * {@link Callable} class * * @author levan * * @param <T> */ private static class ContextLoaderAction<T> implements PrivilegedAction<Callable<T>> { private final Callable<T> current; public ContextLoaderAction(Callable<T> current) { this.current = current; } @Override public Callable<T> run() { Callable<T> privileged = Executors.privilegedCallable(current); return privileged; } } /** * {@link Runnable} implementation for initializing and deploying * {@link javax.sql.DataSource} * * @author levan * */ private static class ConnectionDeployer implements Callable<Boolean> { private Properties properties; private CountDownLatch blocker; private boolean countedDown; public ConnectionDeployer(DataSourceParameters parameters) { this.properties = parameters.properties; this.blocker = parameters.blocker; } private void releaseBlocker() { if (ObjectUtils.notTrue(countedDown)) { blocker.countDown(); countedDown = Boolean.TRUE; } } @Override public Boolean call() throws Exception { boolean result; ClassLoader loader = LoaderPoolManager.getCurrent(); try { Initializer.registerDataSource(properties); result = Boolean.TRUE; } catch (IOException ex) { result = Boolean.FALSE; 
LOG.error(InitMessages.INITIALIZING_ERROR, ex); } finally { releaseBlocker(); LibraryLoader.loadCurrentLibraries(loader); } return result; } } /** * {@link Runnable} implementation for temporal resources removal * * @author levan * */ private static class ResourceCleaner implements Callable<Boolean> { List<File> tmpFiles; public ResourceCleaner(List<File> tmpFiles) { this.tmpFiles = tmpFiles; } /** * Removes temporal resources after deploy {@link Thread} notifies * * @throws InterruptedException */ private void clearTmpData() throws InterruptedException { synchronized (tmpFiles) { tmpFiles.wait(); } for (File tmpFile : tmpFiles) { FileUtils.deleteFile(tmpFile); LOG.info(String.format("Cleaning temporal resource %s done", tmpFile.getName())); } } @Override public Boolean call() throws Exception { boolean result; ClassLoader loader = LoaderPoolManager.getCurrent(); try { clearTmpData(); result = Boolean.TRUE; } catch (InterruptedException ex) { result = Boolean.FALSE; LOG.error("Coluld not clean temporary resources", ex); } finally { LibraryLoader.loadCurrentLibraries(loader); } return result; } } /** * {@link Callable} implementation for deploying {@link javax.ejb.Stateless} * session beans and cache {@link MetaData} keyed by bean name * * @author levan * */ private static class BeanDeployer implements Callable<String> { private MetaCreator creator; private String beanName; private String className; private ClassLoader loader; private List<File> tmpFiles; private MetaData metaData; private CountDownLatch blocker; private boolean countedDown; private List<Field> unitFields; private DeployData deployData; private boolean chekcWatch; private Configuration configuration; public BeanDeployer(BeanParameters parameters) { this.creator = parameters.creator; this.beanName = parameters.beanName; this.className = parameters.className; this.loader = parameters.loader; this.tmpFiles = parameters.tmpFiles; this.metaData = parameters.metaData; this.blocker = parameters.blocker; this.deployData = parameters.deployData; this.configuration = parameters.configuration; } /** * Locks {@link ConnectionSemaphore} if needed for connection processing * * @param semaphore * @param unitName * @param jndiName * @throws IOException */ private void lockSemaphore(ConnectionSemaphore semaphore, String unitName, String jndiName) throws IOException { synchronized (semaphore) { if (ObjectUtils.notTrue(semaphore.isCheck())) { try { creator.configureConnection(unitName, beanName, loader, configuration); } finally { semaphore.notifyAll(); } } } } /** * Increases {@link CountDownLatch} blocker if it is first time in * current thread */ private void releaseBlocker() { if (ObjectUtils.notTrue(countedDown)) { blocker.countDown(); countedDown = Boolean.TRUE; } } /** * Checks if bean {@link MetaData} with same name already cached if it * is increases {@link CountDownLatch} for connection and throws * {@link BeanInUseException} else caches meta data with associated name * * @param beanEjbName * @throws BeanInUseException */ private void checkAndSetBean(String beanEjbName) throws BeanInUseException { try { MetaContainer.checkAndAddMetaData(beanEjbName, metaData); } catch (BeanInUseException ex) { releaseBlocker(); throw ex; } } private void addUnitField(Field unitField) { if (unitFields == null) { unitFields = new ArrayList<Field>(); } unitFields.add(unitField); } /** * Checks weather connection with passed unit or jndi name already * exists * * @param unitName * @param jndiName * @return <code>boolean</code> */ private boolean 
checkOnEmf(String unitName, String jndiName) { boolean checkForEmf = ConnectionContainer.checkForEmf(unitName); if (ObjectUtils.available(jndiName)) { jndiName = NamingUtils.createJpaJndiName(jndiName); checkForEmf = checkForEmf && ConnectionContainer.checkForEmf(jndiName); } return checkForEmf; } /** * Creates {@link ConnectionSemaphore} if such does not exists * * @param context * @param field * @return <code>boolean</code> * @throws IOException */ private void identifyConnections(PersistenceContext context, Field connectionField) throws IOException { ConnectionData connection = new ConnectionData(); connection.setConnectionField(connectionField); String unitName = context.unitName(); String jndiName = context.name(); connection.setUnitName(unitName); connection.setJndiName(jndiName); boolean checkForEmf = checkOnEmf(unitName, jndiName); ConnectionSemaphore semaphore; if (checkForEmf) { releaseBlocker(); semaphore = ConnectionContainer.getSemaphore(unitName); connection.setConnection(semaphore); } else { // Sets connection semaphore for this connection semaphore = ConnectionContainer.cacheSemaphore(unitName, jndiName); connection.setConnection(semaphore); releaseBlocker(); if (ObjectUtils.notNull(semaphore)) { lockSemaphore(semaphore, unitName, jndiName); } } metaData.addConnection(connection); } /** * Caches {@link EJB} annotated fields * * @param beanClass */ private void cacheInjectFields(Field field) { EJB ejb = field.getAnnotation(EJB.class); Class<?> interfaceClass = ejb.beanInterface(); if (interfaceClass == null || interfaceClass.equals(Object.class)) { interfaceClass = field.getType(); } String name = ejb.beanName(); if (name == null || name.isEmpty()) { name = BeanUtils.nameFromInterface(interfaceClass); } String description = ejb.description(); String mappedName = ejb.mappedName(); Class<?>[] interfaceClasses = { interfaceClass }; InjectionData injectionData = new InjectionData(); injectionData.setField(field); injectionData.setInterfaceClasses(interfaceClasses); injectionData.setName(name); injectionData.setDescription(description); injectionData.setMappedName(mappedName); metaData.addInject(injectionData); } /** * Finds and caches {@link PersistenceContext}, {@link PersistenceUnit} * and {@link Resource} annotated {@link Field}s in bean class and * configures connections and creates {@link ConnectionSemaphore}s if it * does not exists for {@link PersistenceContext#unitName()} object * * @throws IOException */ private void retrieveConnections() throws IOException { Class<?> beanClass = metaData.getBeanClass(); Field[] fields = beanClass.getDeclaredFields(); PersistenceUnit unit; PersistenceContext context; Resource resource; EJB ejbAnnot; if (ObjectUtils.notAvailable(fields)) { releaseBlocker(); } for (Field field : fields) { context = field.getAnnotation(PersistenceContext.class); resource = field.getAnnotation(Resource.class); unit = field.getAnnotation(PersistenceUnit.class); ejbAnnot = field.getAnnotation(EJB.class); if (ObjectUtils.notNull(context)) { identifyConnections(context, field); } else if (ObjectUtils.notNull(resource)) { metaData.setTransactionField(field); } else if (ObjectUtils.notNull(unit)) { addUnitField(field); } else if (ObjectUtils.notNull(ejbAnnot)) { // caches EJB annotated fields cacheInjectFields(field); } } if (ObjectUtils.available(unitFields)) { metaData.addUnitFields(unitFields); } } /** * Creates {@link MetaData} for bean class * * @param beanClass * @throws ClassNotFoundException */ private void createMeta(Class<?> beanClass) throws 
IOException { metaData.setBeanClass(beanClass); if (Configuration.isServer()) { retrieveConnections(); } else { releaseBlocker(); } metaData.setLoader(loader); } /** * Checks if bean class is annotated as {@link TransactionAttribute} and * {@link TransactionManagement} and caches * {@link TransactionAttribute#value()} and * {@link TransactionManagement#value()} in {@link MetaData} object * * @param beanClass */ private void checkOnTransactional(Class<?> beanClass) { TransactionAttribute transactionAttribute = beanClass .getAnnotation(TransactionAttribute.class); TransactionManagement transactionManagement = beanClass .getAnnotation(TransactionManagement.class); boolean transactional = Boolean.FALSE; TransactionAttributeType transactionAttrType; TransactionManagementType transactionManType; if (transactionAttribute == null) { transactional = Boolean.TRUE; transactionAttrType = TransactionAttributeType.REQUIRED; transactionManType = TransactionManagementType.CONTAINER; } else if (transactionManagement == null) { transactionAttrType = transactionAttribute.value(); transactionManType = TransactionManagementType.CONTAINER; } else { transactionAttrType = transactionAttribute.value(); transactionManType = transactionManagement.value(); } metaData.setTransactional(transactional); metaData.setTransactionAttrType(transactionAttrType); metaData.setTransactionManType(transactionManType); } /** * Caches {@link Interceptors} annotation defined data * * @param beanClass * @param interceptorClasses * @throws IOException */ private void cacheInterceptors(Class<?> beanClass, Class<?>[] interceptorClasses, Method beanMethod) throws IOException { int length = interceptorClasses.length; Class<?> interceptorClass; List<Method> interceptorMethods; Method interceptorMethod; for (int i = 0; i < length; i++) { interceptorClass = interceptorClasses[i]; interceptorMethods = MetaUtils.getAnnotatedMethods(beanClass, AroundInvoke.class); interceptorMethod = CollectionUtils .getFirst(interceptorMethods); InterceptorData data = new InterceptorData(); data.setBeanClass(beanClass); data.setBeanMethod(beanMethod); data.setInterceptorClass(interceptorClass); data.setInterceptorMethod(interceptorMethod); metaData.addInterceptor(data); } } private void cacheInterceptors(Interceptors interceptors, Class<?> beanClass, Method... 
beanMethods) throws IOException { Class<?>[] interceptorClasses = interceptors.value(); if (ObjectUtils.available(interceptorClasses)) { Method beanMethod = CollectionUtils.getFirst(beanMethods); cacheInterceptors(beanClass, interceptorClasses, beanMethod); } } /** * Identifies and caches {@link Interceptors} annotation data * * @throws IOException */ private void identifyInterceptors(Class<?> beanClass) throws IOException { Interceptors interceptors = beanClass .getAnnotation(Interceptors.class); if (ObjectUtils.notNull(interceptors)) { cacheInterceptors(interceptors, beanClass); } List<Method> beanMethods = MetaUtils.getAnnotatedMethods(beanClass, Interceptors.class); if (ObjectUtils.available(beanMethods)) { for (Method beanMethod : beanMethods) { interceptors = beanMethod.getAnnotation(Interceptors.class); cacheInterceptors(interceptors, beanClass, beanMethod); } } } /** * Identifies and caches bean interfaces * * @param beanClass */ private void indentifyInterfaces(Class<?> beanClass) { Class<?>[] remoteInterface = null; Class<?>[] localInterface = null; Class<?>[] interfaces; List<Class<?>> interfacesList; Remote remote = beanClass.getAnnotation(Remote.class); Local local = beanClass.getAnnotation(Local.class); interfaces = beanClass.getInterfaces(); if (ObjectUtils.notNull(remote)) { remoteInterface = remote.value(); } interfacesList = new ArrayList<Class<?>>(); for (Class<?> interfaceClass : interfaces) { if (interfaceClass.isAnnotationPresent(Remote.class)) interfacesList.add(interfaceClass); } if (ObjectUtils.available(interfacesList)) { remoteInterface = interfacesList .toArray(new Class<?>[interfacesList.size()]); } if (ObjectUtils.notNull(local)) { localInterface = local.value(); } interfacesList = new ArrayList<Class<?>>(); for (Class<?> interfaceClass : interfaces) { if (interfaceClass.isAnnotationPresent(Local.class)) interfacesList.add(interfaceClass); } if (ObjectUtils.available(interfacesList)) { localInterface = interfacesList .toArray(new Class<?>[interfacesList.size()]); } if (ObjectUtils.notAvailable(localInterface) && ObjectUtils.notAvailable(remoteInterface)) { localInterface = interfaces; } metaData.setLocalInterfaces(localInterface); metaData.setRemoteInterfaces(remoteInterface); } /** * Loads and caches bean {@link Class} by name * * @return * @throws IOException */ private String createBeanClass() throws IOException { try { Class<?> beanClass = MetaUtils.classForName(className, Boolean.FALSE, loader); checkOnTransactional(beanClass); String beanEjbName = BeanUtils.beanName(beanClass); checkAndSetBean(beanEjbName); if (RestCheck.check(beanClass)) { RestProvider.add(beanClass); } createMeta(beanClass); indentifyInterfaces(beanClass); identifyInterceptors(beanClass); metaData.setInProgress(Boolean.FALSE); return beanEjbName; } catch (IOException ex) { releaseBlocker(); throw ex; } } private String deployFile() { String deployed = beanName; ClassLoader currentLoader = LoaderPoolManager.getCurrent(); try { LibraryLoader.loadCurrentLibraries(loader); deployed = createBeanClass(); chekcWatch = WatchUtils.checkForWatch(deployData); if (chekcWatch) { URL url = deployData.getUrl(); url = WatchUtils.clearURL(url); MetaContainer.addBeanName(url, deployed); } LOG.info(String.format("bean %s deployed", beanName)); } catch (IOException ex) { LOG.error(String.format("Could not deploy bean %s cause %s", beanName, ex.getMessage()), ex); } finally { LibraryLoader.loadCurrentLibraries(currentLoader); } return deployed; } private String deployExtracted() { String deployed; 
synchronized (tmpFiles) { try { deployed = deployFile(); } finally { tmpFiles.notifyAll(); } } return deployed; } private String deploy() { synchronized (metaData) { String deployed; try { if (ObjectUtils.notNull(tmpFiles)) { deployed = deployExtracted(); } else { deployed = deployFile(); } } catch (Exception ex) { LOG.error(ex.getMessage(), ex); deployed = null; } finally { releaseBlocker(); metaData.notifyAll(); } return deployed; } } @Override public String call() throws Exception { String deployed = deploy(); return deployed; } } /** * Contains parameters for bean deploy classes * * @author levan * */ public static class BeanParameters { public MetaCreator creator; public String className; public String beanName; public ClassLoader loader; public List<File> tmpFiles; public CountDownLatch blocker; public MetaData metaData; public DeployData deployData; public boolean server; public Configuration configuration; } /** * Contains parameters for data source deploy classes * * @author levan * */ public static class DataSourceParameters { public Properties properties; public Properties poolProperties; public String poolPath; public CountDownLatch blocker; } /** * Creates and starts bean deployment process * * @param creator * @param className * @param loader * @param tmpFiles * @param conn * @return {@link Future} * @throws IOException */ public static Future<String> loadBean(BeanParameters parameters) throws IOException { parameters.metaData = new MetaData(); String beanName = BeanUtils.parseName(parameters.className); parameters.beanName = beanName; BeanDeployer beanDeployer = new BeanDeployer(parameters); Future<String> future = LoaderPoolManager.submit(beanDeployer); return future; } /** * Initialized {@link javax.sql.DataSource}s in parallel mode * * @param initializer * @param properties * @param sdLatch */ public static void initializeDatasource(DataSourceParameters parameters) throws IOException { final ConnectionDeployer connectionDeployer = new ConnectionDeployer( parameters); Callable<Boolean> privileged = AccessController .doPrivileged(new ContextLoaderAction<Boolean>( connectionDeployer)); LoaderPoolManager.submit(privileged); } /** * Creates and starts temporal resources removal process * * @param tmpFiles */ public static void removeResources(List<File> tmpFiles) { ResourceCleaner cleaner = new ResourceCleaner(tmpFiles); Callable<Boolean> privileged = AccessController .doPrivileged(new ContextLoaderAction<Boolean>(cleaner)); LoaderPoolManager.submit(privileged); } }
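A minimal sketch of how a caller might hand a single bean to BeanLoader.loadBean through the public BeanParameters holder above. The MetaCreator, ClassLoader and Configuration instances are assumed to be prepared elsewhere by the container, and the bean class name is hypothetical; loadBean fills in beanName and metaData itself.

package org.lightmare.deploy;

import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;

import org.lightmare.config.Configuration;

public class BeanLoaderUsageSketch {

    public static Future<String> deploySingleBean(MetaCreator creator, ClassLoader loader,
            Configuration configuration) throws IOException {
        BeanLoader.BeanParameters parameters = new BeanLoader.BeanParameters();
        parameters.creator = creator;
        parameters.className = "com.example.OrderServiceBean"; // hypothetical bean class
        parameters.loader = loader;
        parameters.tmpFiles = null; // no extracted temporary archive in this sketch
        parameters.blocker = new CountDownLatch(1); // counted down once connections are resolved
        parameters.deployData = null;
        parameters.configuration = configuration;
        // Submits a BeanDeployer to the loader pool; the Future resolves to the deployed bean's name.
        return BeanLoader.loadBean(parameters);
    }
}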
package org.ensembl.healthcheck.util; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.Enumeration; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import java.util.jar.JarFile; /** * General utilities (not database-related). For database-related utilities, see * {@link DBUtils DBUtils}. */ public final class Utils { // hide constructor to prevent instantiation private Utils() { } /** * Read the <code>database.properties</code> file into the System properties * so that it can be overridden with -D. * * @param propertiesFileName * The properties file to read. * @param skipBuildDatabaseURLs * Don't automatically build database URLs. Should generally be * false. */ public static void readPropertiesFileIntoSystem(final String propertiesFileName, final boolean skipBuildDatabaseURLs) { String propsFile; // Prepend home directory if not absolute path if (propertiesFileName.indexOf(File.separator) == -1) { propsFile = System.getProperty("user.dir") + File.separator + propertiesFileName; } else { propsFile = propertiesFileName; } Properties dbProps = Utils.readSimplePropertiesFile(propsFile); Enumeration e = dbProps.propertyNames(); String name, value; while (e.hasMoreElements()) { name = (String) e.nextElement(); value = dbProps.getProperty(name); // add to System System.setProperty(name, value); } if (!skipBuildDatabaseURLs) { buildDatabaseURLs(); } } // readPropertiesFile /** * Build database URLs from system properties. */ public static void buildDatabaseURLs() { // check if a databaseURL property has been specified; if so, use it // if not, build the databaseURL property from host, port etc String databaseURL = System.getProperty("databaseURL"); if (databaseURL == null || databaseURL.equals("")) { // check that required properties are set checkProperties(); // build it databaseURL = "jdbc:mysql://"; if (System.getProperty("host") != null) { databaseURL += System.getProperty("host"); } if (System.getProperty("port") != null) { databaseURL += ":" + System.getProperty("port"); } databaseURL += "/"; System.setProperty("databaseURL", databaseURL); } else { // validate database URL - if it doesn't start with jdbc: this can // cause confusion String prefix = databaseURL.substring(0, 5); if (!prefix.equalsIgnoreCase("jdbc:")) { System.err .println("WARNING - databaseURL property should start with jdbc: but it does not seem to. Check this if you experience problems loading the database driver"); } } // similarly for secondary database URL String secondaryDatabaseURL = System.getProperty("secondary.databaseURL"); if (secondaryDatabaseURL == null || secondaryDatabaseURL.equals("")) { // build it secondaryDatabaseURL = "jdbc:mysql://"; if (System.getProperty("secondary.host") != null) { secondaryDatabaseURL += System.getProperty("secondary.host"); } if (System.getProperty("secondary.port") != null) { secondaryDatabaseURL += ":" + System.getProperty("secondary.port"); } secondaryDatabaseURL += "/"; System.setProperty("secondary.databaseURL", secondaryDatabaseURL); } else { // validate secondary database URL - if it doesn't start with jdbc: this // can // cause confusion String prefix = secondaryDatabaseURL.substring(0, 5); if (!prefix.equalsIgnoreCase("jdbc:")) { System.err .println("WARNING - secondary.databaseURL property should start with jdbc: but it does not seem to. Check this if you experience problems loading the database driver"); } } // ... and for output database URL String outputDatabaseURL = System.getProperty("output.databaseURL"); if (outputDatabaseURL == null || outputDatabaseURL.equals("")) { // build it outputDatabaseURL = "jdbc:mysql://"; if (System.getProperty("output.host") != null) { outputDatabaseURL += System.getProperty("output.host"); } if (System.getProperty("output.port") != null) { outputDatabaseURL += ":" + System.getProperty("output.port"); } outputDatabaseURL += "/"; System.setProperty("output.databaseURL", outputDatabaseURL); } else { // validate output database URL - if it doesn't start with jdbc: this // can // cause confusion String prefix = outputDatabaseURL.substring(0, 5); if (!prefix.equalsIgnoreCase("jdbc:")) { System.err .println("WARNING - output.databaseURL property should start with jdbc: but it does not seem to. Check this if you experience problems loading the database driver"); } } } /** * Check that certain properties are set. */ private static void checkProperties() { // check that properties that need to be set are set String[] requiredProps = { "port", "host", "user" }; for (int i = 0; i < requiredProps.length; i++) { if (System.getProperty(requiredProps[i]) == null) { System.err.println("WARNING: " + requiredProps[i] + " is not set in config file or on command line - cannot connect to database"); System.exit(1); } } } /** * Read a properties file. * * @param propertiesFileName * The name of the properties file to use. * @return The Properties hashtable. */ public static Properties readSimplePropertiesFile(String propertiesFileName) { Properties props = new Properties(); try { FileInputStream in = new FileInputStream(propertiesFileName); props.load(in); in.close(); } catch (Exception e) { e.printStackTrace(); System.exit(1); } return props; } // readPropertiesFile /** * Print a list of Strings, one per line. * * @param l * The List to be printed. */ public static void printList(List l) { Iterator it = l.iterator(); while (it.hasNext()) { System.out.println((String) it.next()); } } // printList /** * Concatenate a list of Strings into a single String. * * @param list * The Strings to list. * @param delim * The delimiter to use. * @return A String containing the elements of list separated by delim. No * trailing delimiter.
*/ public static String listToString(List list, String delim) { StringBuffer buf = new StringBuffer(); Iterator it = list.iterator(); while (it.hasNext()) { buf.append((String) it.next()); if (it.hasNext()) { buf.append(delim); } } return buf.toString(); } /** * Concatenate an array of Strings into a single String. * * @param a * The Strings to list. * @param delim * The delimiter to use. * @return A String containing the elements of a separated by delim. No * trailing delimiter. */ public static String arrayToString(String[] a, String delim) { StringBuffer buf = new StringBuffer(); for (int i = 0; i < a.length; i++) { buf.append(a[i]); if (i + 1 < a.length) { buf.append(delim); } } return buf.toString(); } /** * Print the keys in a HashMap. * * @param m * The map to use. */ public static void printKeys(Map m) { Set s = m.keySet(); Iterator it = s.iterator(); while (it.hasNext()) { System.out.println((String) it.next()); } } // printKeys /** * Print an array of Strings, one per line. * * @param a * The array to be printed. */ public static void printArray(String[] a) { for (int i = 0; i < a.length; i++) { System.out.println(a[i]); } } // printArray /** * Print an Enumeration, one String per line. * * @param e * The enumeration to be printed. */ public static void printEnumeration(Enumeration e) { while (e.hasMoreElements()) { System.out.println(e.nextElement()); } } // printEnumeration /** * Split a classpath-like string into a list of constituent paths. * * @param classPath * The String to split. * @param delim * FileSystem classpath delimiter. * @return An array containing one string per path, in the order they appear * in classPath. */ public static String[] splitClassPath(String classPath, String delim) { StringTokenizer tok = new StringTokenizer(classPath, delim); String[] paths = new String[tok.countTokens()]; int i = 0; while (tok.hasMoreElements()) { paths[i++] = tok.nextToken(); } return paths; } // splitClassPath /** * Search an array of strings for those that contain a pattern. * * @param paths * The List to search. * @param pattern * The pattern to look for. * @return The matching paths, in the order that they were in the input array. */ public static String[] grepPaths(String[] paths, String pattern) { int count = 0; for (int i = 0; i < paths.length; i++) { if (paths[i].indexOf(pattern) > -1) { count++; } } String[] greppedPaths = new String[count]; int j = 0; for (int i = 0; i < paths.length; i++) { if (paths[i].indexOf(pattern) > -1) { greppedPaths[j++] = paths[i]; } } return greppedPaths; } // grepPaths /** * Print the contents of a jar file. * * @param path * The path to the jar file. */ public static void printJarFileContents(String path) { try { JarFile f = new JarFile(path); printEnumeration(f.entries()); } catch (IOException ioe) { ioe.printStackTrace(); } } // printJarFileContents /** * Truncate a string to a certain number of characters. * * @param str * The string to truncate. * @param size * The maximum number of characters. * @param useEllipsis * If true, add "..." to the truncated string to show it's been * truncated. * @return The truncated String, with ellipsis if specified. */ public static String truncate(String str, int size, boolean useEllipsis) { String result = str; if (str != null && str.length() > size) { result = str.substring(0, size); if (useEllipsis) { result += "..."; } } return result; } // truncate /** * Pad (on the right) a string with a certain number of characters. * * @return The padded String. 
* @param size * The desired length of the final, padded string. * @param str * The String to add the padding to. * @param pad * The String to pad with. */ public static String pad(String str, String pad, int size) { StringBuffer result = new StringBuffer(str); int startSize = str.length(); for (int i = startSize; i < size; i++) { result.append(pad); } return result.toString(); } // pad /** * Read a text file. * * @param name * The name of the file to read. * @return An array of Strings representing the lines of the file. */ public static String[] readTextFile(String name) { List lines = new ArrayList(); BufferedReader br = null; try { br = new BufferedReader(new FileReader(name)); } catch (FileNotFoundException fe) { System.err.println("Cannot find " + name); fe.printStackTrace(); } String line; try { while ((line = br.readLine()) != null) { lines.add(line); } br.close(); } catch (Exception e) { e.printStackTrace(); } return (String[]) lines.toArray(new String[lines.size()]); } // readTextFile /** * Check if a String is in an array of Strings. The whole array is searched * (until a match is found); this is quite slow but does not require the array * to be sorted in any way beforehand. * * @param str * The String to search for. * @param a * The array to search through. * @param caseSensitive * If true, case sensitive searching is done. * @return true if str is in a. */ public static boolean stringInArray(String str, String[] a, boolean caseSensitive) { boolean result = false; for (int i = 0; i < a.length; i++) { if (caseSensitive) { if (a[i].equals(str)) { result = true; break; } } else { if (a[i].equalsIgnoreCase(str)) { result = true; break; } } } return result; } /** * Check if an object is in an array. The whole array is searched (until a * match is found); this is quite slow but does not require the array to be * sorted in any way beforehand. * * @param o * The Object to search for. * @param a * The array to search through. * @return true if o is in a. */ public static boolean objectInArray(Object o, Object[] a) { for (int i = 0; i < a.length; i++) { if (a[i].equals(o)) { return true; } } return false; } /** * Return an array containing all of the subdirectories of a given directory. * * @param parentDir * The directory to look in. * @return All the subdirectories (if any) in parentDir. */ public static String[] getSubDirs(String parentDir) { List dirs = new ArrayList(); File parentDirFile = new File(parentDir); String[] filesAndDirs = parentDirFile.list(); if (filesAndDirs != null) { for (int i = 0; i < filesAndDirs.length; i++) { File f = new File(parentDir + File.separator + filesAndDirs[i]); if (f.isDirectory()) { dirs.add(filesAndDirs[i]); } } } return (String[]) (dirs.toArray(new String[dirs.size()])); } /** * Remove the objects from one array that are present in another. * * @param source * The array to be filtered. * @param remove * An array of objects to be removed from source. * @return A new array containing all objects that are in source minus any * that are in remove. */ public static Object[] filterArray(Object[] source, Object[] remove) { List result = new ArrayList(); for (int i = 0; i < source.length; i++) { if (!objectInArray(source[i], remove)) { result.add(source[i]); } } return result.toArray(new Object[result.size()]); } /** * Format a time as hours, minutes and seconds. * * @param time * The time in ms, e.g. from System.currentTimeMillis() * @return The time formatted as e.g. 4 hours 2 min 3s. Hours is largest unit * currently supported. 
*/ public static String formatTimeString(long time) { String s = ""; Calendar cal = new GregorianCalendar(); cal.setTimeInMillis(time); // TODO years etc // Calendar.HOUR starts from 1 if (cal.get(Calendar.HOUR_OF_DAY) > 1) { s += (cal.get(Calendar.HOUR_OF_DAY) - 1) + " hours "; } if (cal.get(Calendar.MINUTE) > 0) { s += cal.get(Calendar.MINUTE) + " min "; } if (cal.get(Calendar.SECOND) > 0) { s += cal.get(Calendar.SECOND) + "s "; } if (time < 1000) { s = time + "ms"; } return s; } /** * Delete a file. * * @param file * The file to delete. */ public static void deleteFile(String file) { File f = new File(file); f.delete(); } /** * Write/append a string to a file. * * @param file * The file to write to. * @param str * The string to write. * @param append * If true, append the string to the file if it already exists. * @param newLine * If true, add a new line character after writing */ public static void writeStringToFile(String file, String str, boolean append, boolean newLine) { try { FileWriter fw = new FileWriter(file, append); fw.write(str); if (newLine) { fw.write("\n"); } fw.close(); } catch (IOException ioe) { ioe.printStackTrace(); } } /** * Convert a list of Longs to an array of longs. */ public static long[] listToArrayLong(List list) { long[] array = new long[list.size()]; Iterator it = list.iterator(); int i = 0; while (it.hasNext()) { array[i++] = ((Long) it.next()).longValue(); } return array; } /** * Convert the first character of a string to upper case. Ignore the rest of * the string. */ public static String ucFirst(String str) { String first = str.substring(0, 1).toUpperCase(); String last = str.substring(1); return first + last; } public static String truncateDatabaseName(String db) { if (db.length() <= 27) { return db; } // change genus to single letter int underscoreIndex = db.indexOf("_"); String genusLetter = db.substring(0, 1); String result = genusLetter + "_" + db.substring(underscoreIndex + 1); // if length is *still* > 27, change long database type to an abbreviated // version if (result.length() > 27) { result = result.replaceAll("otherfeatures", "other..."); } return result; } public static String truncateTestName(String test) { return (test.length() <= 27) ? test : test.substring(0, 28); } public static String[] removeStringFromArray(String[] tables, String table) { String[] result = new String[tables.length - 1]; int j = 0; for (int i = 0; i < tables.length; i++) { if (!tables[i].equalsIgnoreCase(table)) { if (j < result.length) { result[j++] = tables[i]; } } } return result; } /** * Convert a list to a HashMap, where the key and value of each map element is the same as each list element. */ public static HashMap<Object,Object> listToMap(List<Object> list) { HashMap<Object,Object> map = new HashMap<Object,Object>(); for (Object o : list) { map.put(o, o); } return map; } } // Utils
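A short, self-contained driver for a few of the string helpers defined above; the sample values are arbitrary and the expected outputs follow directly from the method bodies.

package org.ensembl.healthcheck.util;

import java.util.Arrays;
import java.util.List;

public class UtilsExample {

    public static void main(String[] args) {
        List<String> names = Arrays.asList("human", "mouse", "zebrafish");

        // Join with a delimiter, no trailing delimiter: "human, mouse, zebrafish"
        System.out.println(Utils.listToString(names, ", "));

        // Right-pad to a total length of 10 characters: "core......"
        System.out.println(Utils.pad("core", ".", 10));

        // Truncate to 20 characters and mark the truncation: "homo_sapiens_otherfe..."
        System.out.println(Utils.truncate("homo_sapiens_otherfeatures_70_37", 20, true));

        // Durations under one second are reported in milliseconds: "725ms"
        System.out.println(Utils.formatTimeString(725));
    }
}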
package org.flymine.util; import junit.framework.*; public class StringUtilTest extends TestCase { public StringUtilTest(String arg1) { super(arg1); } public void testCountOccurancesNullStr() throws Exception { try { StringUtil.countOccurances(null, "A test string"); fail("Expected: NullPointerException"); } catch (NullPointerException e) { } } public void testCountOccurancesNullTarget() throws Exception { try { StringUtil.countOccurances("e", null); fail("Expected: NullPointerException"); } catch (NullPointerException e) { } } public void testCountOccurances() throws Exception { assertEquals(0,StringUtil.countOccurances("z", "A sentence without the required letter in it")); assertEquals(8,StringUtil.countOccurances("e", "A sentence with the required letter in it")); assertEquals(1,StringUtil.countOccurances("e", "effffffff")); assertEquals(1,StringUtil.countOccurances("e", "ffffffffffffff fffe")); } public void testCapitalise() throws Exception { assertEquals("A", StringUtil.capitalise("a")); assertEquals("A", StringUtil.capitalise("A")); assertEquals("Aaaa", StringUtil.capitalise("aaaa")); assertEquals("AaaaBbbb", StringUtil.capitalise("aaaaBbbb")); assertEquals("", StringUtil.capitalise("")); assertNull(StringUtil.capitalise(null)); } }
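The StringUtil class exercised by these tests is not part of this extract. As a hedged sketch, a minimal implementation consistent with the assertions above could look like the following; the real FlyMine class may differ.

package org.flymine.util;

public final class StringUtil {

    private StringUtil() { }

    // Counts non-overlapping occurrences of str within target; throws NullPointerException on null arguments.
    public static int countOccurances(String str, String target) {
        if (str == null || target == null) {
            throw new NullPointerException("Both arguments must be non-null");
        }
        int count = 0;
        int from = 0;
        int index;
        while ((index = target.indexOf(str, from)) != -1) {
            count++;
            from = index + str.length();
        }
        return count;
    }

    // Upper-cases the first character only; returns null or empty input unchanged.
    public static String capitalise(String str) {
        if (str == null || str.length() == 0) {
            return str;
        }
        return str.substring(0, 1).toUpperCase() + str.substring(1);
    }
}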
package org.lightmare.deploy.fs; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent; import java.nio.file.WatchEvent.Kind; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.config.Configuration; import org.lightmare.jpa.datasource.FileParsers; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.LogUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.concurrent.ThreadFactoryUtil; import org.lightmare.utils.fs.WatchUtils; /** * Deployment manager, {@link Watcher#deployFile(URL)}, * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and * {@link File} modification event handler for deployments if java version is * 1.7 or above * * @author levan * @since 0.0.45-SNAPSHOT */ public class Watcher implements Runnable { private static final String DEPLOY_THREAD_NAME = "watch_thread"; private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5; private static final long SLEEP_TIME = 5500L; private static final ExecutorService DEPLOY_POOL = Executors .newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME, DEPLOY_POOL_PRIORITY)); private Set<DeploymentDirectory> deployments; private Set<String> dataSources; private static final Logger LOG = Logger.getLogger(Watcher.class); /** * Defines file types for watch service * * @author Levan * @since 0.0.45-SNAPSHOT */ private static enum WatchFileType { DATA_SOURCE, DEPLOYMENT, NONE; } /** * To filter only deployed sub files from directory * * @author levan * @since 0.0.45-SNAPSHOT */ private static class DeployFiletr implements FileFilter { @Override public boolean accept(File file) { boolean accept; try { URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); accept = MetaContainer.chackDeployment(url); } catch (MalformedURLException ex) { LOG.error(ex.getMessage(), ex); accept = false; } catch (IOException ex) { LOG.error(ex.getMessage(), ex); accept = false; } return accept; } } private Watcher() { deployments = getDeployDirectories(); dataSources = getDataSourcePaths(); } /** * Clears and gets file {@link URL} by file name * * @param fileName * @return {@link URL} * @throws IOException */ private static URL getAppropriateURL(String fileName) throws IOException { File file = new File(fileName); URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); return url; } /** * Gets {@link Set} of {@link DeploymentDirectory} instances from * configuration * * @return {@link Set}<code><DeploymentDirectory></code> */ private static Set<DeploymentDirectory> getDeployDirectories() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); 
Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (config.isWatchStatus() && CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } return deploymetDirss; } /** * Gets {@link Set} of data source paths from configuration * * @return {@link Set}<code><String></code> */ private static Set<String> getDataSourcePaths() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (config.isWatchStatus() && CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } return paths; } /** * Checks and gets appropriated {@link WatchFileType} by passed file name * * @param fileName * @return {@link WatchFileType} */ private static WatchFileType checkType(String fileName) { WatchFileType type; File file = new File(fileName); String path = file.getPath(); String filePath = WatchUtils.clearPath(path); path = file.getParent(); String parentPath = WatchUtils.clearPath(path); Set<DeploymentDirectory> apps = getDeployDirectories(); Set<String> dss = getDataSourcePaths(); if (CollectionUtils.valid(apps)) { String deploymantPath; Iterator<DeploymentDirectory> iterator = apps.iterator(); boolean notDeployment = Boolean.TRUE; DeploymentDirectory deployment; while (iterator.hasNext() && notDeployment) { deployment = iterator.next(); deploymantPath = deployment.getPath(); notDeployment = ObjectUtils.notEquals(deploymantPath, parentPath); } if (notDeployment) { type = WatchFileType.NONE; } else { type = WatchFileType.DEPLOYMENT; } } else if (CollectionUtils.valid(dss) && dss.contains(filePath)) { type = WatchFileType.DATA_SOURCE; } else { type = WatchFileType.NONE; } return type; } private static void fillFileList(File[] files, List<File> list) { if (CollectionUtils.valid(files)) { for (File file : files) { list.add(file); } } } /** * Lists all deployed {@link File}s * * @return {@link List}<File> */ public static List<File> listDeployments() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } File[] files; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(deploymetDirss)) { String path; DeployFiletr filter = new DeployFiletr(); for (DeploymentDirectory deployment : deploymetDirss) { path = deployment.getPath(); files = new File(path).listFiles(filter); fillFileList(files, list); } } return list; } /** * Lists all data source {@link File}s * * @return {@link List}<File> */ public static List<File> listDataSources() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } File file; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(paths)) { for (String path : paths) { file = new File(path); list.add(file); } } return list; } /** * Deploys application or data source file by passed file name * * @param fileName * 
@throws IOException */ public static void deployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { FileParsers fileParsers = new FileParsers(); fileParsers.parseStandaloneXml(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); deployFile(url); } } /** * Deploys application or data source file by passed {@link URL} instance * * @param url * @throws IOException */ public static void deployFile(URL url) throws IOException { URL[] archives = { url }; MetaContainer.getCreator().scanForBeans(archives); } /** * Removes from deployments application or data source file by passed * {@link URL} instance * * @param url * @throws IOException */ public static void undeployFile(URL url) throws IOException { boolean valid = MetaContainer.undeploy(url); if (valid && RestContainer.hasRest()) { RestProvider.reload(); } } /** * Removes from deployments application or data source file by passed file * name * * @param fileName * @throws IOException */ public static void undeployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { Initializer.undeploy(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); undeployFile(url); } } /** * Removes from deployments and deploys again application or data source * file by passed file name * * @param fileName * @throws IOException */ public static void redeployFile(String fileName) throws IOException { undeployFile(fileName); deployFile(fileName); } /** * Handles file change event * * @param dir * @param currentEvent * @throws IOException */ private void handleEvent(Path dir, WatchEvent<Path> currentEvent) throws IOException { if (ObjectUtils.notNull(currentEvent)) { Path prePath = currentEvent.context(); Path path = dir.resolve(prePath); String fileName = path.toString(); int count = currentEvent.count(); Kind<?> kind = currentEvent.kind(); if (kind == StandardWatchEventKinds.ENTRY_MODIFY) { LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count); redeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) { LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count); undeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) { LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count); redeployFile(fileName); } } } /** * Runs file watch service * * @param watch * @throws IOException */ private void runService(WatchService watch) throws IOException { Path dir; boolean toRun = true; boolean valid; while (toRun) { try { WatchKey key; key = watch.take(); List<WatchEvent<?>> events = key.pollEvents(); WatchEvent<?> currentEvent = null; WatchEvent<Path> typedCurrentEvent; int times = 0; dir = (Path) key.watchable(); for (WatchEvent<?> event : events) { if (event.kind() == StandardWatchEventKinds.OVERFLOW) { continue; } if (times == 0 || event.count() > currentEvent.count()) { currentEvent = event; } times++; valid = key.reset(); toRun = valid && key.isValid(); if (toRun) { Thread.sleep(SLEEP_TIME); typedCurrentEvent = ObjectUtils.cast(currentEvent); handleEvent(dir, typedCurrentEvent); } } } catch (InterruptedException ex) { throw new IOException(ex); } } } private void registerPath(FileSystem fs, String path, WatchService watch) throws IOException { Path deployPath = fs.getPath(path); deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE, 
StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.OVERFLOW, StandardWatchEventKinds.ENTRY_DELETE); runService(watch); } private void registerPaths(File[] files, FileSystem fs, WatchService watch) throws IOException { String path; for (File file : files) { path = file.getPath(); registerPath(fs, path, watch); } } /** * Registers deployments directories to watch service * @param deploymentDirss * @param fs * @param watch * @throws IOException */ private void registerPaths(Collection<DeploymentDirectory> deploymentDirss, FileSystem fs, WatchService watch) throws IOException { String path; boolean scan; File directory; File[] files; for (DeploymentDirectory deployment : deploymentDirss) { path = deployment.getPath(); scan = deployment.isScan(); if (scan) { directory = new File(path); files = directory.listFiles(); if (CollectionUtils.valid(files)) { registerPaths(files, fs, watch); } } else { registerPath(fs, path, watch); } } } /** * Registers data source path to watch service * * @param paths * @param fs * @param watch * @throws IOException */ private void registerDsPaths(Collection<String> paths, FileSystem fs, WatchService watch) throws IOException { for (String path : paths) { registerPath(fs, path, watch); } } @Override public void run() { try { FileSystem fs = FileSystems.getDefault(); WatchService watch = null; try { watch = fs.newWatchService(); } catch (IOException ex) { LOG.error(ex.getMessage(), ex); throw ex; } if (CollectionUtils.valid(deployments)) { registerPaths(deployments, fs, watch); } if (CollectionUtils.valid(dataSources)) { registerDsPaths(dataSources, fs, watch); } } catch (IOException ex) { LOG.fatal(ex.getMessage(), ex); LOG.fatal("system going to shut down cause of hot deployment"); try { ConnectionContainer.closeConnections(); } catch (IOException iex) { LOG.fatal(iex.getMessage(), iex); } System.exit(-1); } finally { DEPLOY_POOL.shutdown(); } } /** * Starts watch service for application and data source files */ public static void startWatch() { Watcher watcher = new Watcher(); DEPLOY_POOL.submit(watcher); } }
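A minimal sketch of driving the watcher above. startWatch() submits the watcher to its single-thread pool, and deployFile/undeployFile can also be invoked directly; the path used here is hypothetical and the MetaContainer configuration is assumed to be initialized already.

package org.lightmare.deploy.fs;

import java.io.IOException;

public class WatcherUsageSketch {

    public static void main(String[] args) throws IOException {
        // Start the background watch service over the configured deployment and data source paths.
        Watcher.startWatch();

        // Deployments can also be triggered explicitly; checkType() decides whether the path is
        // treated as a data source descriptor or an application archive.
        Watcher.deployFile("/opt/lightmare/deploy/example-app.jar"); // hypothetical path

        // Later, the same file can be removed from the running container.
        Watcher.undeployFile("/opt/lightmare/deploy/example-app.jar");
    }
}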
package io.airlift.http.client.spnego; import com.google.common.collect.ImmutableMap; import com.sun.security.auth.module.Krb5LoginModule; import io.airlift.http.client.KerberosNameType; import io.airlift.log.Logger; import io.airlift.units.Duration; import org.eclipse.jetty.client.api.Authentication; import org.eclipse.jetty.client.api.ContentResponse; import org.eclipse.jetty.client.api.Request; import org.eclipse.jetty.http.HttpHeader; import org.eclipse.jetty.util.Attributes; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; import org.ietf.jgss.GSSManager; import org.ietf.jgss.Oid; import javax.annotation.concurrent.GuardedBy; import javax.security.auth.Subject; import javax.security.auth.login.AppConfigurationEntry; import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; import java.io.File; import java.io.UncheckedIOException; import java.net.InetAddress; import java.net.URI; import java.net.UnknownHostException; import java.security.Principal; import java.security.PrivilegedAction; import java.util.Base64; import java.util.Locale; import java.util.concurrent.TimeUnit; import static com.google.common.base.Preconditions.checkState; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; import static org.ietf.jgss.GSSContext.INDEFINITE_LIFETIME; import static org.ietf.jgss.GSSCredential.DEFAULT_LIFETIME; import static org.ietf.jgss.GSSCredential.INITIATE_ONLY; import static org.ietf.jgss.GSSName.NT_USER_NAME; public class SpnegoAuthentication implements Authentication { private static final String NEGOTIATE = HttpHeader.NEGOTIATE.asString(); private static final Logger LOG = Logger.get(SpnegoAuthentication.class); private static final Duration MIN_CREDENTIAL_LIFE_TIME = new Duration(60, TimeUnit.SECONDS); private static final GSSManager GSS_MANAGER = GSSManager.getInstance(); private static final Oid SPNEGO_OID; private static final Oid KERBEROS_OID; static { try { SPNEGO_OID = new Oid("1.3.6.1.5.5.2"); KERBEROS_OID = new Oid("1.2.840.113554.1.2.2"); } catch (GSSException e) { throw new AssertionError(e); } } private final File keytab; private final File credentialCache; private final String servicePrincipalPattern; private final String principal; private final String remoteServiceName; private final boolean useCanonicalHostname; private final Oid nameType; @GuardedBy("this") private Session clientSession; public SpnegoAuthentication( File keytab, File kerberosConfig, File credentialCache, String servicePrincipalPattern, String principal, String remoteServiceName, KerberosNameType nameType, boolean useCanonicalHostname) { requireNonNull(kerberosConfig, "Kerberos config path is null"); requireNonNull(remoteServiceName, "Kerberos remote service name is null"); requireNonNull(nameType, "GSS name type is null"); this.keytab = keytab; this.credentialCache = credentialCache; this.servicePrincipalPattern = servicePrincipalPattern; this.principal = principal; this.remoteServiceName = remoteServiceName; this.nameType = nameType.getOid(); this.useCanonicalHostname = useCanonicalHostname; System.setProperty("java.security.krb5.conf", kerberosConfig.getAbsolutePath()); } @Override public Result authenticate(Request request, ContentResponse response, HeaderInfo headerInfo, Attributes attributes) { URI normalizedUri = 
UriUtil.normalizedUri(request.getURI()); return new Result() { @Override public URI getURI() { return normalizedUri; } @Override public void apply(Request request) { GSSContext context = null; try { String servicePrincipal = makeServicePrincipal(servicePrincipalPattern, remoteServiceName, normalizedUri.getHost(), useCanonicalHostname); Session session = getSession(); context = doAs(session.getLoginContext().getSubject(), () -> { GSSContext result = GSS_MANAGER.createContext( GSS_MANAGER.createName(servicePrincipal, nameType), SPNEGO_OID, session.getClientCredential(), INDEFINITE_LIFETIME); result.requestMutualAuth(true); result.requestConf(true); result.requestInteg(true); result.requestCredDeleg(false); return result; }); byte[] token = context.initSecContext(new byte[0], 0, 0); if (token != null) { request.header(headerInfo.getHeader(), format("%s %s", NEGOTIATE, Base64.getEncoder().encodeToString(token))); } else { throw new RuntimeException(format("No token generated from GSS context for %s", request.getURI())); } } catch (GSSException e) { throw new RuntimeException(format("Failed to establish GSSContext for request %s", request.getURI()), e); } catch (LoginException e) { throw new RuntimeException(format("Failed to establish LoginContext for request %s", request.getURI()), e); } finally { try { if (context != null) { context.dispose(); } } catch (GSSException e) { // ignore } } } }; } @Override public boolean matches(String type, URI uri, String realm) { // The class matches all requests for Negotiate scheme. Realm is not used for now return NEGOTIATE.equalsIgnoreCase(type); } private synchronized Session getSession() throws LoginException, GSSException { if (clientSession == null || clientSession.getClientCredential().getRemainingLifetime() < MIN_CREDENTIAL_LIFE_TIME.getValue(TimeUnit.SECONDS)) { // TODO: do we need to call logout() on the LoginContext? 
LoginContext loginContext = new LoginContext("", null, null, new Configuration() { @Override public AppConfigurationEntry[] getAppConfigurationEntry(String name) { ImmutableMap.Builder<String, String> optionsBuilder = ImmutableMap.builder(); optionsBuilder.put("refreshKrb5Config", "true"); optionsBuilder.put("doNotPrompt", "true"); optionsBuilder.put("useKeyTab", "true"); if (LOG.isDebugEnabled()) { optionsBuilder.put("debug", "true"); } if (keytab != null) { optionsBuilder.put("keyTab", keytab.getAbsolutePath()); } if (credentialCache != null) { optionsBuilder.put("ticketCache", credentialCache.getAbsolutePath()); optionsBuilder.put("useTicketCache", "true"); optionsBuilder.put("renewTGT", "true"); } if (principal != null) { optionsBuilder.put("principal", principal); } return new AppConfigurationEntry[] { new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, optionsBuilder.build()) }; } }); loginContext.login(); Subject subject = loginContext.getSubject(); Principal clientPrincipal = subject.getPrincipals().iterator().next(); GSSCredential clientCredential = doAs(subject, () -> GSS_MANAGER.createCredential( GSS_MANAGER.createName(clientPrincipal.getName(), NT_USER_NAME), DEFAULT_LIFETIME, KERBEROS_OID, INITIATE_ONLY)); clientSession = new Session(loginContext, clientCredential); } return clientSession; } private static String makeServicePrincipal(String servicePrincipalPattern, String serviceName, String hostName, boolean useCanonicalHostname) { String serviceHostName = hostName; if (useCanonicalHostname) { serviceHostName = canonicalizeServiceHostname(hostName); } return servicePrincipalPattern.replaceAll("\\$\\{SERVICE}", serviceName).replaceAll("\\$\\{HOST}", serviceHostName.toLowerCase(Locale.US)); } private static String canonicalizeServiceHostname(String hostName) { try { InetAddress address = InetAddress.getByName(hostName); String fullHostName; if ("localhost".equalsIgnoreCase(address.getHostName())) { fullHostName = InetAddress.getLocalHost().getCanonicalHostName(); } else { fullHostName = address.getCanonicalHostName(); } checkState(!fullHostName.equalsIgnoreCase("localhost"), "Fully qualified name of localhost should not resolve to 'localhost'. System configuration error?"); return fullHostName; } catch (UnknownHostException e) { throw new UncheckedIOException(e); } } private interface GssSupplier<T> { T get() throws GSSException; } private static <T> T doAs(Subject subject, GssSupplier<T> action) { return Subject.doAs(subject, (PrivilegedAction<T>) () -> { try { return action.get(); } catch (GSSException e) { throw new RuntimeException(e); } }); } private static class Session { private final LoginContext loginContext; private final GSSCredential clientCredential; Session(LoginContext loginContext, GSSCredential clientCredential) { requireNonNull(loginContext, "loginContext is null"); requireNonNull(clientCredential, "gssCredential is null"); this.loginContext = loginContext; this.clientCredential = clientCredential; } LoginContext getLoginContext() { return loginContext; } GSSCredential getClientCredential() { return clientCredential; } } }
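The private makeServicePrincipal() above expands ${SERVICE} and ${HOST} placeholders in the configured service principal pattern. The standalone snippet below simply replays that substitution so the resulting principal shape is easy to see; the pattern and host name are illustrative values, not defaults taken from this file.

import java.util.Locale;

public class ServicePrincipalPatternDemo {

    public static void main(String[] args) {
        String pattern = "${SERVICE}@${HOST}";   // illustrative pattern
        String serviceName = "HTTP";
        String hostName = "API.Example.COM";

        // Same replaceAll calls as makeServicePrincipal(): substitute the service name and the
        // lower-cased host name into the pattern.
        String principal = pattern
                .replaceAll("\\$\\{SERVICE}", serviceName)
                .replaceAll("\\$\\{HOST}", hostName.toLowerCase(Locale.US));

        System.out.println(principal); // HTTP@api.example.com
    }
}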
package org.exist.xquery; import java.text.Collator; import java.util.Iterator; import org.exist.EXistException; import org.exist.dom.ContextItem; import org.exist.dom.DocumentImpl; import org.exist.dom.DocumentSet; import org.exist.dom.ExtArrayNodeSet; import org.exist.dom.NodeProxy; import org.exist.dom.NodeSet; import org.exist.dom.VirtualNodeSet; import org.exist.storage.DBBroker; import org.exist.storage.FulltextIndexSpec; import org.exist.storage.IndexSpec; import org.exist.storage.Indexable; import org.exist.xquery.util.ExpressionDumper; import org.exist.xquery.value.AtomicValue; import org.exist.xquery.value.BooleanValue; import org.exist.xquery.value.Item; import org.exist.xquery.value.Sequence; import org.exist.xquery.value.SequenceIterator; import org.exist.xquery.value.Type; /** * A general XQuery/XPath2 comparison expression. * * @author wolf */ public class GeneralComparison extends BinaryOp { /** * The type of operator used for the comparison, i.e. =, !=, &lt;, &gt; ... * One of the constants declared in class {@link Constants}. */ protected int relation = Constants.EQ; /** * Truncation flags: when comparing with a string value, the search * string may be truncated with a single * wildcard. See the constants declared * in class {@link Constants}. * * The standard functions starts-with, ends-with and contains are * transformed into a general comparison with wildcard. Hence the need * to consider wildcards here. */ protected int truncation = Constants.TRUNC_NONE; /** * The class might cache the entire results of a previous execution. */ protected CachedResult cached = null; /** * Extra argument (to standard functions starts-with/contains etc.) * to indicate the collation to be used for string comparisons. */ protected Expression collationArg = null; /** * Set to true if this expression is called within the where clause * of a FLWOR expression. */ protected boolean inWhereClause = false; public GeneralComparison(XQueryContext context, int relation) { this(context, relation, Constants.TRUNC_NONE); } public GeneralComparison(XQueryContext context, int relation, int truncation) { super(context); this.relation = relation; } public GeneralComparison(XQueryContext context, Expression left, Expression right, int relation) { this(context, left, right, relation, Constants.TRUNC_NONE); } public GeneralComparison(XQueryContext context, Expression left, Expression right, int relation, int truncation) { super(context); this.relation = relation; this.truncation = truncation; // simplify arguments if (left instanceof PathExpr && ((PathExpr) left).getLength() == 1) { context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", "Simplifying left argument"); left = ((PathExpr) left).getExpression(0); } add(left); if (right instanceof PathExpr && ((PathExpr) right).getLength() == 1) { context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", "Simplifying right argument"); right = ((PathExpr) right).getExpression(0); } add(right); //TODO : should we also use simplify() here ? -pb } /* (non-Javadoc) * @see org.exist.xquery.BinaryOp#analyze(org.exist.xquery.AnalyzeContextInfo) */ public void analyze(AnalyzeContextInfo contextInfo) throws XPathException { contextInfo.setParent(this); super.analyze(contextInfo); inWhereClause = (contextInfo.getFlags() & IN_WHERE_CLAUSE) != 0; } /* (non-Javadoc) * @see org.exist.xquery.BinaryOp#returnsType() */ public int returnsType() { //Ugly workaround for the polysemy of "." 
which is expanded as self::node() even when it is not relevant boolean invalidNodeEvaluation = getLeft() instanceof LocationStep && ((LocationStep)getLeft()).axis == Constants.SELF_AXIS; if (inPredicate && !invalidNodeEvaluation && (!Dependency.dependsOn(getDependencies(), Dependency.CONTEXT_ITEM))) { /* If one argument is a node set we directly * return the matching nodes from the context set. This works * only inside predicates. */ return Type.NODE; } // In all other cases, we return boolean return Type.BOOLEAN; } /* (non-Javadoc) * @see org.exist.xquery.AbstractExpression#getDependencies() */ public int getDependencies() { final int leftDeps = getLeft().getDependencies(); // left expression returns node set if (Type.subTypeOf(getLeft().returnsType(), Type.NODE) && // and does not depend on the context item (leftDeps & Dependency.CONTEXT_ITEM) == 0 && (!inWhereClause || (leftDeps & Dependency.CONTEXT_VARS) == 0)) { return Dependency.CONTEXT_SET; } else { return Dependency.CONTEXT_SET + Dependency.CONTEXT_ITEM; } } /* (non-Javadoc) * @see org.exist.xquery.Expression#eval(org.exist.xquery.StaticContext, org.exist.dom.DocumentSet, org.exist.xquery.value.Sequence, org.exist.xquery.value.Item) */ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException { //Profiling for eval if (context.getProfiler().isEnabled()) { context.getProfiler().start(this); context.getProfiler().message(this, Profiler.DEPENDENCIES, "DEPENDENCIES", Dependency.getDependenciesName(this.getDependencies())); if (contextSequence != null) context.getProfiler().message(this, Profiler.START_SEQUENCES, "CONTEXT SEQUENCE", contextSequence); if (contextItem != null) context.getProfiler().message(this, Profiler.START_SEQUENCES, "CONTEXT ITEM", contextItem.toSequence()); } Sequence result = null; //Ugly workaround for the polysemy of "." which is expanded as self::node() even when it is not relevant boolean invalidNodeEvaluation = contextSequence != null && !Type.subTypeOf(contextSequence.getItemType(), Type.NODE) && getLeft() instanceof LocationStep && ((LocationStep)getLeft()).axis == Constants.SELF_AXIS; /* * If we are inside a predicate and one of the arguments is a node set, * we try to speed up the query by returning nodes from the context set. * This works only inside a predicate. The node set will always be the left * operand. 
*/ if (inPredicate && !invalidNodeEvaluation) { if (!(Dependency.dependsOn(getDependencies(), Dependency.CONTEXT_ITEM))&& Type.subTypeOf(getLeft().returnsType(), Type.NODE)) { if(contextItem != null) contextSequence = contextItem.toSequence(); /* * TODO quickNodeSetCompare() is NOT being called for xqueries like - * collection("/db/CommunityDirectory/data")/communitygroup[validation/lastapproved/date = current-dateTime()] * collection("/db/CommunityDirectory/data")/communitygroup[validation/lastapproved/date = ("2005-12-20T16:39:00" cast as xs:dateTime)] * but is being called for xqueries like - * collection("/db/CommunityDirectory/data")/communitygroup[validation/lastapproved/date = "2005-12-20T16:39:00"] * collection("/db/CommunityDirectory/data")/communitygroup[validation/lastapproved/date = "2005-12-20T16:39:00" cast as xs:dateTime] * - but due to the string type of the key falls back to nodeSetCompare() * * deliriumsky */ if ((getRight().getDependencies() & Dependency.CONTEXT_ITEM) == 0 /*&& (getRight().getCardinality() & Cardinality.MANY) == 0*/) //changed to allow multiple right cardinality into () - deliriumsky { if (context.getProfiler().isEnabled()) context.getProfiler().message(this, Profiler.OPTIMIZATION_FLAGS, "OPTIMIZATION CHOICE", "quickNodeSetCompare"); result = quickNodeSetCompare(contextSequence); } else { if (context.getProfiler().isEnabled()) context.getProfiler().message(this, Profiler.OPTIMIZATION_FLAGS, "OPTIMIZATION CHOICE", "nodeSetCompare"); result = nodeSetCompare(contextSequence); } } } if(result == null) { if (context.getProfiler().isEnabled()) context.getProfiler().message(this, Profiler.OPTIMIZATION_FLAGS, "OPTIMIZATION CHOICE", "genericCompare"); result = genericCompare(contextSequence, contextItem); } if (context.getProfiler().isEnabled()) context.getProfiler().end(this, "", result); return result; } /** * Generic, slow implementation. Applied if none of the possible * optimizations can be used. * * @param contextSequence * @param contextItem * @return * @throws XPathException */ protected Sequence genericCompare(Sequence contextSequence, Item contextItem) throws XPathException { Sequence ls = getLeft().eval(contextSequence, contextItem); Sequence rs = getRight().eval(contextSequence, contextItem); Collator collator = getCollator(contextSequence); AtomicValue lv, rv; if (ls.getLength() == 1 && rs.getLength() == 1) { lv = ls.itemAt(0).atomize(); rv = rs.itemAt(0).atomize(); return BooleanValue.valueOf(compareValues(collator, lv, rv)); } else { for (SequenceIterator i1 = ls.iterate(); i1.hasNext();) { lv = i1.nextItem().atomize(); if (rs.getLength() == 1 && compareValues(collator, lv, rs.itemAt(0).atomize())) return BooleanValue.TRUE; else { for (SequenceIterator i2 = rs.iterate(); i2.hasNext();) { rv = i2.nextItem().atomize(); if (compareValues(collator, lv, rv)) return BooleanValue.TRUE; } } } } return BooleanValue.FALSE; } /** * Optimized implementation, which can be applied if the left operand * returns a node set. In this case, the left expression is executed first. * All matching context nodes are then passed to the right expression. 
*/ protected Sequence nodeSetCompare(Sequence contextSequence) throws XPathException { NodeSet nodes = (NodeSet) getLeft().eval(contextSequence); return nodeSetCompare(nodes, contextSequence); } protected Sequence nodeSetCompare(NodeSet nodes, Sequence contextSequence) throws XPathException { NodeSet result = new ExtArrayNodeSet(); NodeProxy current; ContextItem c; Sequence rs; AtomicValue lv, rv; Collator collator = getCollator(contextSequence); if(contextSequence != null && contextSequence != Sequence.EMPTY_SEQUENCE) { for (Iterator i = nodes.iterator(); i.hasNext();) { current = (NodeProxy) i.next(); c = current.getContext(); if(c == null) throw new XPathException(getASTNode(), "Internal error: context node missing"); lv = current.atomize(); //TODO : review to consider transverse context do { rs = getRight().eval(c.getNode().toSequence()); for (SequenceIterator si = rs.iterate(); si.hasNext();) { rv = si.nextItem().atomize(); if (compareValues(collator, lv, rv)) { result.add(current); } } }while ((c = c.getNextDirect()) != null); } } else { for (Iterator i = nodes.iterator(); i.hasNext();) { current = (NodeProxy) i.next(); lv = current.atomize(); rs = getRight().eval(null); for (SequenceIterator si = rs.iterate(); si.hasNext();) { rv = si.nextItem().atomize(); if (compareValues(collator, lv, rv)) { result.add(current); } } } } return result; } /** * Optimized implementation: first checks if a range index is defined * on the nodes in the left argument. If that fails, check if we can use * the fulltext index to speed up the search. Otherwise, fall back to * {@link #nodeSetCompare(NodeSet, Sequence)}. */ protected Sequence quickNodeSetCompare(Sequence contextSequence) throws XPathException { /* TODO think about optimising fallback to NodeSetCompare() in the for loop!!! * At the moment when we fallback to NodeSetCompare() we are in effect throwing away any nodes * we have already processed in quickNodeSetCompare() and reprocessing all the nodes in NodeSetCompare(). * Instead - Could we create a NodeCompare() (based on NodeSetCompare() code) to only compare a single node and then union the result? * - deliriumsky */ /* TODO think about caching of results in this function... * also examine and check if correct (line near the end) - * boolean canCache = contextSequence instanceof NodeSet && (getRight().getDependencies() & Dependency.VARS) == 0 && (getLeft().getDependencies() & Dependency.VARS) == 0; * - deliriumsky */ // if the context sequence hasn't changed we can return a cached result if(cached != null && cached.isValid(contextSequence)) { if(context.getProfiler().isEnabled()) { context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", "Returned cached result"); } return(cached.getResult()); } //get the NodeSet on the left NodeSet nodes = (NodeSet) getLeft().eval(contextSequence); if(!(nodes instanceof VirtualNodeSet) && nodes.getLength() == 0) //nothing on the left, so nothing to do { return(Sequence.EMPTY_SEQUENCE); } //get the Sequence on the right Sequence rightSeq = getRight().eval(contextSequence); if(rightSeq.getLength() == 0) //nothing on the right, so nothing to do { return(Sequence.EMPTY_SEQUENCE); } //Holds the result NodeSet result = null; //get the type of a possible index int indexType = nodes.getIndexType(); //See if we have a range index defined on the nodes in this sequence //TODO : use isSubType ??? 
-pb if(indexType != Type.ITEM) { //Get the documents from the node set DocumentSet docs = nodes.getDocumentSet(); //Iterate through the right hand sequence for(SequenceIterator itRightSeq = rightSeq.iterate(); itRightSeq.hasNext();) { //Get the index Key Item key = itRightSeq.nextItem().atomize(); //if key has truncation convert to string if(truncation != Constants.TRUNC_NONE) { //truncation is only possible on strings key = key.convertTo(Type.STRING); } //else if key is not the same type as the index //TODO : use isSubType ??? -pb else if(key.getType() != indexType) { //try and convert the key to the index type try { key = key.convertTo(indexType); } catch(XPathException xpe) { //Could not convert the key to a suitable type for the index, fallback to nodeSetCompare() if(context.getProfiler().isEnabled()) { context.getProfiler().message(this, Profiler.OPTIMIZATION_FLAGS, "OPTIMIZATION FALLBACK", "nodeSetCompare"); } return nodeSetCompare(nodes, contextSequence); } } // If key implements org.exist.storage.Indexable, we can use the index if(key instanceof Indexable && Type.subTypeOf(key.getType(), indexType)) { if(truncation == Constants.TRUNC_NONE) { //key without truncation, find key context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", "Using value index to find key '" + Type.getTypeName(key.getType()) + "(" + key.getStringValue() + ")'"); if(result == null) //if first iteration { result = context.getBroker().getValueIndex().find(relation, docs, nodes, (Indexable)key); } else { result = result.union(context.getBroker().getValueIndex().find(relation, docs, nodes, (Indexable)key)); } } else { //key with truncation, match key context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", "Using value index to match key '" + Type.getTypeName(key.getType()) + "(" + key.getStringValue() + ")'"); try { if(result == null) //if first iteration { result = context.getBroker().getValueIndex().match(docs, nodes, key.getStringValue().replace('%', '*'), DBBroker.MATCH_WILDCARDS); } else { result = result.union(context.getBroker().getValueIndex().match(docs, nodes, key.getStringValue().replace('%', '*'), DBBroker.MATCH_WILDCARDS)); } } catch (EXistException e) { throw new XPathException(getASTNode(), e.getMessage(), e); } } } else { //the datatype of our key does not //implement org.exist.storage.Indexable or is not of the correct type if(context.getProfiler().isEnabled()) { context.getProfiler().message(this, Profiler.OPTIMIZATION_FLAGS, "OPTIMIZATION FALLBACK", "nodeSetCompare"); } return(nodeSetCompare(nodes, contextSequence)); } //removed by Pierrick Brihaye //REMOVED : a *general* comparison should not be dependant of the settings of a fulltext index /* } else if (key.getType() == Type.ATOMIC || Type.subTypeOf(key.getType(), Type.STRING)) { if (!nodes.hasMixedContent() && relation == Constants.EQ && nodes.hasTextIndex()) { // we can use the fulltext index String cmp = rightSeq.getStringValue(); if(cmp.length() < NativeTextEngine.MAX_WORD_LENGTH) nodes = useFulltextIndex(cmp, nodes, docs); // now compare the input node set to the search expression result = context.getBroker().getNodesEqualTo(nodes, docs, relation, truncation, cmp, getCollator(contextSequence)); } else { // no usable index found. 
Fall back to a sequential scan of the nodes result = context.getBroker().getNodesEqualTo(nodes, docs, relation, truncation, rightSeq.getStringValue(), getCollator(contextSequence)); } */ /* end */ } } else { //no range index defined on the nodes in this sequence, so fallback to nodeSetCompare if(context.getProfiler().isEnabled()) { context.getProfiler().message(this, Profiler.OPTIMIZATION_FLAGS, "OPTIMIZATION FALLBACK", "nodeSetCompare"); } return(nodeSetCompare(nodes, contextSequence)); } // can this result be cached? Don't cache if the result depends on local variables. boolean canCache = contextSequence instanceof NodeSet && (getRight().getDependencies() & Dependency.VARS) == 0 && (getLeft().getDependencies() & Dependency.VARS) == 0; if(canCache) { cached = new CachedResult((NodeSet)contextSequence, result); } //return the result of the range index lookup(s) :-) return result; } //removed by Pierrick Brihaye /* protected NodeSet useFulltextIndex(String cmp, NodeSet nodes, DocumentSet docs) throws XPathException { // LOG.debug("Using fulltext index for expression " + ExpressionDumper.dump(this)); String cmpCopy = cmp; // try to use a fulltext search expression to reduce the number // of potential nodes to scan through SimpleTokenizer tokenizer = new SimpleTokenizer(); tokenizer.setText(cmp); TextToken token; String term; boolean foundNumeric = false; // setup up an &= expression using the fulltext index ExtFulltext containsExpr = new ExtFulltext(context, Constants.FULLTEXT_AND); containsExpr.setASTNode(getASTNode()); // disable default match highlighting int oldFlags = context.getBroker().getTextEngine().getTrackMatches(); context.getBroker().getTextEngine().setTrackMatches(Serializer.TAG_NONE); int i = 0; for (; i < 5 && (token = tokenizer.nextToken(false)) != null; i++) { // remember if we find an alphanumeric token if (token.getType() == TextToken.ALPHANUM) foundNumeric = true; } // check if all elements are indexed. If not, we can't use the // fulltext index. if (foundNumeric) foundNumeric = checkArgumentTypes(context, docs); if ((!foundNumeric) && i > 0) { // all elements are indexed: use the fulltext index cmp = handleTruncation(cmp); containsExpr.addTerm(new LiteralValue(context, new StringValue(cmp))); nodes = (NodeSet) containsExpr.eval(nodes, null); } context.getBroker().getTextEngine().setTrackMatches(oldFlags); cmp = cmpCopy; return nodes; } private String handleTruncation(String cmp) { switch (truncation) { case Constants.TRUNC_RIGHT: return cmp + '*'; case Constants.TRUNC_LEFT: return '*' + cmp; case Constants.TRUNC_BOTH: return '*' + cmp + '*'; default: return cmp; } } */ /** * Cast the atomic operands into a comparable type * and compare them. 
*/ protected boolean compareValues(Collator collator, AtomicValue lv, AtomicValue rv) throws XPathException { try { return compareAtomic(collator, lv, rv, context.isBackwardsCompatible(), truncation, relation); } catch (XPathException e) { e.setASTNode(getASTNode()); throw e; } } public static boolean compareAtomic(Collator collator, AtomicValue lv, AtomicValue rv, boolean backwardsCompatible, int truncation, int relation) throws XPathException{ int ltype = lv.getType(); int rtype = rv.getType(); if (ltype == Type.ITEM || ltype == Type.ATOMIC) { if (Type.subTypeOf(rtype, Type.NUMBER)) { if(isEmptyString(lv)) return false; lv = lv.convertTo(Type.DOUBLE); } else if (rtype == Type.ITEM || rtype == Type.ATOMIC) { lv = lv.convertTo(Type.STRING); rv = rv.convertTo(Type.STRING); } else lv = lv.convertTo(rv.getType()); } else if (rtype == Type.ITEM || rtype == Type.ATOMIC) { if (Type.subTypeOf(ltype, Type.NUMBER)) { if(isEmptyString(lv)) return false; rv = rv.convertTo(Type.DOUBLE); } else if (rtype == Type.ITEM || rtype == Type.ATOMIC) { lv = lv.convertTo(Type.STRING); rv = rv.convertTo(Type.STRING); } else rv = rv.convertTo(lv.getType()); } if (backwardsCompatible) { // in XPath 1.0 compatible mode, if one of the operands is a number, cast // both operands to xs:double if (Type.subTypeOf(ltype, Type.NUMBER) || Type.subTypeOf(rtype, Type.NUMBER)) { lv = lv.convertTo(Type.DOUBLE); rv = rv.convertTo(Type.DOUBLE); } } // System.out.println( // lv.getStringValue() + Constants.OPS[relation] + rv.getStringValue()); switch(truncation) { case Constants.TRUNC_RIGHT: return lv.startsWith(collator, rv); case Constants.TRUNC_LEFT: return lv.endsWith(collator, rv); case Constants.TRUNC_BOTH: return lv.contains(collator, rv); default: return lv.compareTo(collator, relation, rv); } } /** * @param lv * @return * @throws XPathException */ private static boolean isEmptyString(AtomicValue lv) throws XPathException { if(Type.subTypeOf(lv.getType(), Type.STRING) || lv.getType() == Type.ATOMIC) { if(lv.getStringValue().length() == 0) return true; } return false; } private boolean checkArgumentTypes(XQueryContext context, DocumentSet docs) throws XPathException { for (Iterator i = docs.iterator(); i.hasNext();) { DocumentImpl doc = (DocumentImpl) i.next(); IndexSpec idxSpec = doc.getCollection().getIdxConf(context.getBroker()); if(idxSpec != null) { FulltextIndexSpec idx = idxSpec.getFulltextIndexSpec(); if (idx != null) { if(idx.isSelective()) return true; if(!idx.getIncludeAlphaNum()) return true; } } } return false; } /* (non-Javadoc) * @see org.exist.xquery.PathExpr#dump(org.exist.xquery.util.ExpressionDumper) */ public void dump(ExpressionDumper dumper) { getLeft().dump(dumper); dumper.display(' ').display(Constants.OPS[relation]).display(' '); getRight().dump(dumper); } public String toString() { StringBuffer result = new StringBuffer(); result.append(getLeft().toString()); result.append(' ').append(Constants.OPS[relation]).append(' '); result.append(getRight().toString()); return result.toString(); } protected void switchOperands() { context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", "Switching operands"); //Invert relation switch (relation) { case Constants.GT : relation = Constants.LT; break; case Constants.LT : relation = Constants.GT; break; case Constants.LTEQ : relation = Constants.GTEQ; break; case Constants.GTEQ : relation = Constants.LTEQ; break; } Expression right = getRight(); setRight(getLeft()); setLeft(right); } /** * Possibly switch operands to simplify execution */ 
protected void simplify() { //Prefer nodes at the left hand if ((!Type.subTypeOf(getLeft().returnsType(), Type.NODE)) && Type.subTypeOf(getRight().returnsType(), Type.NODE)) switchOperands(); //Prefer fewer items at the left hand else if ((getLeft().getCardinality() & Cardinality.MANY) != 0 && (getRight().getCardinality() & Cardinality.MANY) == 0) switchOperands(); } protected Collator getCollator(Sequence contextSequence) throws XPathException { if(collationArg == null) return context.getDefaultCollator(); String collationURI = collationArg.eval(contextSequence).getStringValue(); return context.getCollator(collationURI); } public void setCollation(Expression collationArg) { this.collationArg = collationArg; } /* (non-Javadoc) * @see org.exist.xquery.PathExpr#resetState() */ public void resetState() { super.resetState(); getLeft().resetState(); getRight().resetState(); cached = null; } }
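// A minimal illustrative sketch (plain Java, illustrative names only): the existential
// pairwise semantics implemented by genericCompare() above. An XPath general comparison
// is true as soon as any pair of atomized items from the two operand sequences satisfies
// the relation, and false only when no pair does.
import java.util.List;
import java.util.function.BiPredicate;

final class ExistentialCompareSketch {

    static <T> boolean generalCompare(List<T> left, List<T> right, BiPredicate<T, T> relation) {
        for (T lv : left) {
            for (T rv : right) {
                if (relation.test(lv, rv)) {
                    return true;    // first matching pair decides the result
                }
            }
        }
        return false;               // no matching pair found
    }

    public static void main(String[] args) {
        // ("a", "b") = "b" evaluates to true: one left item equals the right value
        System.out.println(generalCompare(List.of("a", "b"), List.of("b"), String::equals));
    }
}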
package org.lightmare.deploy.fs; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent; import java.nio.file.WatchEvent.Kind; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.config.Configuration; import org.lightmare.jpa.datasource.FileParsers; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.LogUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.concurrent.ThreadFactoryUtil; import org.lightmare.utils.fs.WatchUtils; /** * Deployment manager, {@link Watcher#deployFile(URL)}, * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and * {@link File} modification event handler for deployments if java version is * 1.7 or above * * @author levan * */ public class Watcher implements Runnable { private static final String DEPLOY_THREAD_NAME = "watch_thread"; private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5; private static final long SLEEP_TIME = 5500L; private static final ExecutorService DEPLOY_POOL = Executors .newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME, DEPLOY_POOL_PRIORITY)); private Set<DeploymentDirectory> deployments; private Set<String> dataSources; private static final Logger LOG = Logger.getLogger(Watcher.class); /** * Defines file types for watch service * * @author Levan * */ private static enum WatchFileType { DATA_SOURCE, DEPLOYMENT, NONE; } /** * To filter only deployed sub files from directory * * @author levan * */ private static class DeployFiletr implements FileFilter { @Override public boolean accept(File file) { boolean accept; try { URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); accept = MetaContainer.chackDeployment(url); } catch (MalformedURLException ex) { LOG.error(ex.getMessage(), ex); accept = false; } catch (IOException ex) { LOG.error(ex.getMessage(), ex); accept = false; } return accept; } } private Watcher() { deployments = getDeployDirectories(); dataSources = getDataSourcePaths(); } private static URL getAppropriateURL(String fileName) throws IOException { File file = new File(fileName); URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); return url; } private static Set<DeploymentDirectory> getDeployDirectories() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (config.isWatchStatus() && CollectionUtils.available(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } return deploymetDirss; } private static Set<String> getDataSourcePaths() { 
Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (config.isWatchStatus() && CollectionUtils.available(pathsCurrent)) { paths.addAll(pathsCurrent); } } return paths; } private static WatchFileType checkType(String fileName) { WatchFileType type; File file = new File(fileName); String path = file.getPath(); String filePath = WatchUtils.clearPath(path); path = file.getParent(); String parentPath = WatchUtils.clearPath(path); Set<DeploymentDirectory> apps = getDeployDirectories(); Set<String> dss = getDataSourcePaths(); if (CollectionUtils.available(apps)) { String deploymantPath; Iterator<DeploymentDirectory> iterator = apps.iterator(); boolean notDeployment = Boolean.TRUE; DeploymentDirectory deployment; while (iterator.hasNext() && notDeployment) { deployment = iterator.next(); deploymantPath = deployment.getPath(); notDeployment = ObjectUtils.notEquals(deploymantPath, parentPath); } if (notDeployment) { type = WatchFileType.NONE; } else { type = WatchFileType.DEPLOYMENT; } } else if (CollectionUtils.available(dss) && dss.contains(filePath)) { type = WatchFileType.DATA_SOURCE; } else { type = WatchFileType.NONE; } return type; } private static void fillFileList(File[] files, List<File> list) { if (CollectionUtils.available(files)) { for (File file : files) { list.add(file); } } } /** * Lists all deployed {@link File}s * * @return {@link List}<File> */ public static List<File> listDeployments() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (CollectionUtils.available(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } File[] files; List<File> list = new ArrayList<File>(); if (CollectionUtils.available(deploymetDirss)) { String path; DeployFiletr filter = new DeployFiletr(); for (DeploymentDirectory deployment : deploymetDirss) { path = deployment.getPath(); files = new File(path).listFiles(filter); fillFileList(files, list); } } return list; } /** * Lists all data source {@link File}s * * @return {@link List}<File> */ public static List<File> listDataSources() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (CollectionUtils.available(pathsCurrent)) { paths.addAll(pathsCurrent); } } File file; List<File> list = new ArrayList<File>(); if (CollectionUtils.available(paths)) { for (String path : paths) { file = new File(path); list.add(file); } } return list; } public static void deployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { FileParsers fileParsers = new FileParsers(); fileParsers.parseStandaloneXml(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); deployFile(url); } } public static void deployFile(URL url) throws IOException { URL[] archives = { url }; MetaContainer.getCreator().scanForBeans(archives); } public static void undeployFile(URL url) throws IOException { boolean valid = MetaContainer.undeploy(url); if (valid && RestContainer.hasRest()) { 
RestProvider.reload(); } } public static void undeployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { Initializer.undeploy(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); undeployFile(url); } } public static void redeployFile(String fileName) throws IOException { undeployFile(fileName); deployFile(fileName); } private void handleEvent(Path dir, WatchEvent<Path> currentEvent) throws IOException { if (currentEvent == null) { return; } Path prePath = currentEvent.context(); Path path = dir.resolve(prePath); String fileName = path.toString(); int count = currentEvent.count(); Kind<?> kind = currentEvent.kind(); if (kind == StandardWatchEventKinds.ENTRY_MODIFY) { LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count); redeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) { LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count); undeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) { LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count); redeployFile(fileName); } } private void runService(WatchService watch) throws IOException { Path dir; boolean toRun = true; boolean valid; while (toRun) { try { WatchKey key; key = watch.take(); List<WatchEvent<?>> events = key.pollEvents(); WatchEvent<?> currentEvent = null; WatchEvent<Path> typedCurrentEvent; int times = 0; dir = (Path) key.watchable(); for (WatchEvent<?> event : events) { if (event.kind() == StandardWatchEventKinds.OVERFLOW) { continue; } if (times == 0 || event.count() > currentEvent.count()) { currentEvent = event; } times++; valid = key.reset(); toRun = valid && key.isValid(); if (toRun) { Thread.sleep(SLEEP_TIME); typedCurrentEvent = ObjectUtils.cast(currentEvent); handleEvent(dir, typedCurrentEvent); } } } catch (InterruptedException ex) { throw new IOException(ex); } } } private void registerPath(FileSystem fs, String path, WatchService watch) throws IOException { Path deployPath = fs.getPath(path); deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.OVERFLOW, StandardWatchEventKinds.ENTRY_DELETE); runService(watch); } private void registerPaths(File[] files, FileSystem fs, WatchService watch) throws IOException { String path; for (File file : files) { path = file.getPath(); registerPath(fs, path, watch); } } private void registerPaths(Collection<DeploymentDirectory> deploymentDirss, FileSystem fs, WatchService watch) throws IOException { String path; boolean scan; File directory; File[] files; for (DeploymentDirectory deployment : deploymentDirss) { path = deployment.getPath(); scan = deployment.isScan(); if (scan) { directory = new File(path); files = directory.listFiles(); if (CollectionUtils.available(files)) { registerPaths(files, fs, watch); } } else { registerPath(fs, path, watch); } } } private void registerDsPaths(Collection<String> paths, FileSystem fs, WatchService watch) throws IOException { for (String path : paths) { registerPath(fs, path, watch); } } @Override public void run() { try { FileSystem fs = FileSystems.getDefault(); WatchService watch = null; try { watch = fs.newWatchService(); } catch (IOException ex) { LOG.error(ex.getMessage(), ex); throw ex; } if (CollectionUtils.available(deployments)) { registerPaths(deployments, fs, watch); } if (ObjectUtils.available(dataSources)) { registerDsPaths(dataSources, fs, watch); } } catch 
(IOException ex) { LOG.fatal(ex.getMessage(), ex); LOG.fatal("System is going to shut down because of a hot deployment error"); try { ConnectionContainer.closeConnections(); } catch (IOException iex) { LOG.fatal(iex.getMessage(), iex); } System.exit(-1); } finally { DEPLOY_POOL.shutdown(); } } public static void startWatch() { Watcher watcher = new Watcher(); DEPLOY_POOL.submit(watcher); } }
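// A minimal, self-contained illustrative sketch (JDK java.nio.file only, directory name
// hypothetical): the WatchService register/poll loop that the Watcher class above wraps
// with its deploy/undeploy handling.
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;

public class WatchServiceSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        Path dir = Paths.get("deployments");        // hypothetical watched directory
        try (WatchService watch = FileSystems.getDefault().newWatchService()) {
            dir.register(watch,
                    StandardWatchEventKinds.ENTRY_CREATE,
                    StandardWatchEventKinds.ENTRY_MODIFY,
                    StandardWatchEventKinds.ENTRY_DELETE);
            while (true) {
                WatchKey key = watch.take();        // blocks until events are available
                for (WatchEvent<?> event : key.pollEvents()) {
                    if (event.kind() == StandardWatchEventKinds.OVERFLOW) {
                        continue;                   // some events may have been discarded
                    }
                    Path changed = dir.resolve((Path) event.context());
                    System.out.println(event.kind() + ": " + changed);
                }
                if (!key.reset()) {
                    break;                          // directory is no longer accessible
                }
            }
        }
    }
}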
package org.flymine.objectstore.ojb; import junit.framework.*; import org.flymine.sql.Database; import org.flymine.sql.DatabaseFactory; import org.flymine.objectstore.query.*; import org.apache.ojb.broker.*; import org.apache.ojb.broker.metadata.*; import org.flymine.model.testmodel.Company; public class SqlGeneratorFlymineImplTest extends TestCase { public SqlGeneratorFlymineImplTest(String arg1) { super(arg1); } public void testPreparedSelectStatement() throws Exception { Database db = DatabaseFactory.getDatabase("db.unittest"); ObjectStoreOjbImpl os = ObjectStoreOjbImpl.getInstance(db); PersistenceBroker broker = os.getPersistenceBroker(); DescriptorRepository dr = broker.getDescriptorRepository(); SqlGeneratorFlymineImpl gen = (SqlGeneratorFlymineImpl) broker.serviceSqlGenerator(); Query q = new Query(); QueryClass qc1 = new QueryClass(Company.class); q.addToSelect(qc1); q.addFrom(qc1); FlymineSqlSelectStatement s1 = new FlymineSqlSelectStatement(q, dr, 0, 10000); assertEquals(s1.getStatement(), gen.getPreparedSelectStatement(q, dr, 0, 10000)); } }
package org.jgroups.protocols.pbcast; import org.jgroups.*; import org.jgroups.annotations.GuardedBy; import org.jgroups.stack.Protocol; import org.jgroups.util.Digest; import org.jgroups.util.Promise; import org.jgroups.util.Streamable; import org.jgroups.util.Util; import java.io.*; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; /** * Flush, as it name implies, forces group members to flush their pending * messages while blocking them to send any additional messages. The process of * flushing acquiesces the group so that state transfer or a join can be done. * It is also called stop-the-world model as nobody will be able to send * messages while a flush is in process. * * <p> * Flush is needed for: * <p> * (1) State transfer. When a member requests state transfer, the coordinator * tells everyone to stop sending messages and waits for everyone's ack. Then it * asks the application for its state and ships it back to the requester. After * the requester has received and set the state successfully, the coordinator * tells everyone to resume sending messages. * <p> * (2) View changes (e.g.a join). Before installing a new view V2, flushing * would ensure that all messages *sent* in the current view V1 are indeed * *delivered* in V1, rather than in V2 (in all non-faulty members). This is * essentially Virtual Synchrony. * * * * @author Vladimir Blagojevic * @version $Id$ * @since 2.4 */ public class FLUSH extends Protocol { public static final String NAME = "FLUSH"; @GuardedBy("sharedLock") private View currentView; private Address localAddress; /** * Group member that requested FLUSH. For view installations flush * coordinator is the group coordinator For state transfer flush coordinator * is the state requesting member */ @GuardedBy("sharedLock") private Address flushCoordinator; @GuardedBy("sharedLock") private final List<Address> flushMembers; @GuardedBy("sharedLock") private final Map<Address, Digest> flushCompletedMap; @GuardedBy("sharedLock") private final Set<Address> stopFlushOkSet; @GuardedBy("sharedLock") private final Set<Address> suspected; private final Object sharedLock = new Object(); private final Object blockMutex = new Object(); /** * Indicates if FLUSH.down() is currently blocking threads Condition * predicate associated with blockMutex */ @GuardedBy("blockMutex") private volatile boolean isBlockingFlushDown = true; /** * Default timeout for a group member to be in * <code>isBlockingFlushDown</code> */ private long timeout = 8000; /** * Default timeout for a group member to wait for <code>Channel#startFlush</code> to return * */ private long start_flush_timeout = 6000; private boolean enable_reconciliation = true; @GuardedBy("sharedLock") private boolean receivedFirstView = false; @GuardedBy("sharedLock") private boolean receivedMoreThanOneView = false; private volatile boolean allowMessagesToPassUp = false; private long startFlushTime; private long totalTimeInFlush; private int numberOfFlushes; private double averageFlushDuration; private final Promise<Boolean> flush_promise = new Promise<Boolean>(); private final AtomicBoolean flushInProgress = new AtomicBoolean(false); @GuardedBy("sharedLock") private final List<Address> reconcileOks = new ArrayList<Address>(); public FLUSH(){ super(); currentView = new View(new ViewId(), new Vector<Address>()); flushCompletedMap = new HashMap<Address, Digest>(); stopFlushOkSet = new TreeSet<Address>(); flushMembers = new ArrayList<Address>(); suspected = new TreeSet<Address>(); } public String getName() { return 
NAME; } public boolean setProperties(Properties props) { super.setProperties(props); timeout = Util.parseLong(props, "timeout", timeout); start_flush_timeout = Util.parseLong(props, "start_flush_timeout", start_flush_timeout); enable_reconciliation = Util.parseBoolean(props, "enable_reconciliation", enable_reconciliation); String str = props.getProperty("auto_flush_conf"); if(str != null){ log.warn("auto_flush_conf has been deprecated and its value will be ignored"); props.remove("auto_flush_conf"); } if(!props.isEmpty()){ log.error("the following properties are not recognized: " + props); return false; } return true; } public void start() throws Exception { Map<String, Object> map = new HashMap<String, Object>(); map.put("flush_supported", Boolean.TRUE); up_prot.up(new Event(Event.CONFIG, map)); down_prot.down(new Event(Event.CONFIG, map)); synchronized(sharedLock){ receivedFirstView = false; receivedMoreThanOneView = false; } synchronized(blockMutex){ isBlockingFlushDown = true; } allowMessagesToPassUp = false; } public void stop() { synchronized(sharedLock){ currentView = new View(new ViewId(), new Vector<Address>()); flushCompletedMap.clear(); stopFlushOkSet.clear(); flushMembers.clear(); suspected.clear(); flushCoordinator = null; } } public double getAverageFlushDuration() { return averageFlushDuration; } public long getTotalTimeInFlush() { return totalTimeInFlush; } public int getNumberOfFlushes() { return numberOfFlushes; } public boolean startFlush() { return startFlush(new Event(Event.SUSPEND), 3, false); } private boolean startFlush(Event evt, int numberOfAttempts, boolean isRetry) { boolean successfulFlush = false; if(!flushInProgress.get() || isRetry){ flush_promise.reset(); if(log.isDebugEnabled()){ if(isRetry) log.debug("Retrying FLUSH at " + localAddress + ", " + evt + ". Attempts left " + numberOfAttempts); else log.debug("Received " + evt + " at " + localAddress + ". Running FLUSH..."); } onSuspend((View) evt.getArg()); try{ Boolean r = flush_promise.getResultWithTimeout(start_flush_timeout); successfulFlush = r.booleanValue(); }catch(TimeoutException e){ if(log.isDebugEnabled()) log.debug("At " + localAddress + " timed out waiting for flush responses after " + start_flush_timeout + " msec"); } } if(!successfulFlush && numberOfAttempts > 0){ long backOffSleepTime = Util.random(5); backOffSleepTime = backOffSleepTime < 2 ? backOffSleepTime + 2 : backOffSleepTime; if(log.isDebugEnabled()) log.debug("At " + localAddress + ". Backing off for " + backOffSleepTime + " sec. 
Attempts left " + numberOfAttempts); Util.sleep(backOffSleepTime*1000); successfulFlush = startFlush(evt, --numberOfAttempts, true); } return successfulFlush; } public void stopFlush() { down(new Event(Event.RESUME)); } public Object down(Event evt) { switch(evt.getType()){ case Event.MSG: Message msg = (Message) evt.getArg(); Address dest = msg.getDest(); if(dest == null || dest.isMulticastAddress()){ //mcasts FlushHeader fh = (FlushHeader) msg.getHeader(getName()); if(fh != null && fh.type == FlushHeader.FLUSH_BYPASS){ return down_prot.down(evt); } else{ blockMessageDuringFlush(); } }else{ //unicasts are irrelevant in virtual synchrony, let them through return down_prot.down(evt); } break; case Event.CONNECT: case Event.CONNECT_WITH_STATE_TRANSFER: sendBlockUpToChannel(); break; case Event.SUSPEND: return startFlush(evt, 3, false); case Event.RESUME: onResume(); return null; } return down_prot.down(evt); } private void blockMessageDuringFlush() { boolean shouldSuspendByItself = false; long start = 0, stop = 0; synchronized(blockMutex){ while(isBlockingFlushDown){ if(log.isDebugEnabled()) log.debug("FLUSH block at " + localAddress + " for " + (timeout <= 0 ? "ever" : timeout + "ms")); try{ start = System.currentTimeMillis(); if(timeout <= 0) blockMutex.wait(); else blockMutex.wait(timeout); stop = System.currentTimeMillis(); }catch(InterruptedException e){ Thread.currentThread().interrupt(); // set interrupt flag // again } if(isBlockingFlushDown){ isBlockingFlushDown = false; shouldSuspendByItself = true; blockMutex.notifyAll(); } } } if(shouldSuspendByItself){ log.warn("unblocking FLUSH.down() at " + localAddress + " after timeout of " + (stop - start) + "ms"); flush_promise.setResult(Boolean.TRUE); } } public Object up(Event evt) { switch(evt.getType()){ case Event.MSG: Message msg = (Message) evt.getArg(); FlushHeader fh = (FlushHeader) msg.getHeader(getName()); if(fh != null){ switch(fh.type){ case FlushHeader.FLUSH_BYPASS: return up_prot.up(evt); case FlushHeader.START_FLUSH: handleStartFlush(msg, fh); break; case FlushHeader.FLUSH_RECONCILE: handleFlushReconcile(msg, fh); break; case FlushHeader.FLUSH_RECONCILE_OK: onFlushReconcileOK(msg); break; case FlushHeader.STOP_FLUSH: onStopFlush(); break; case FlushHeader.ABORT_FLUSH: synchronized(sharedLock){ flushCompletedMap.clear(); } flush_promise.setResult(Boolean.FALSE); break; case FlushHeader.FLUSH_COMPLETED: if(isCurrentFlushMessage(fh)) onFlushCompleted(msg.getSrc(), fh.digest); break; } return null; // do not pass FLUSH msg up }else{ // for processing of application messages after we join, // lets wait for STOP_FLUSH to complete // before we start allowing message up. if(!allowMessagesToPassUp) return null; } break; case Event.VIEW_CHANGE: // if this is channel's first view and its the only member of the // group then the // goal is to pass BLOCK,VIEW,UNBLOCK to application space on the // same thread as VIEW. 
View newView = (View) evt.getArg(); boolean firstView = onViewChange(newView); boolean singletonMember = newView.size() == 1 && newView.containsMember(localAddress); if(firstView && singletonMember){ up_prot.up(evt); synchronized(blockMutex){ isBlockingFlushDown = false; blockMutex.notifyAll(); } if(log.isDebugEnabled()) log.debug("At " + localAddress + " unblocking FLUSH.down() and sending UNBLOCK up"); allowMessagesToPassUp = true; up_prot.up(new Event(Event.UNBLOCK)); return null; } break; case Event.TMP_VIEW: /* * April 25, 2007 * * Accomodating current NAKACK (1.127) * * Updates field currentView of a leaving coordinator. Leaving * coordinator, after it sends out the view, does not need to * participate in second flush phase. * * see onStopFlush(); * * TODO: revisit if still needed post NAKACK 1.127 * */ View tmpView = (View) evt.getArg(); if(!tmpView.containsMember(localAddress)){ onViewChange(tmpView); } break; case Event.SET_LOCAL_ADDRESS: localAddress = (Address) evt.getArg(); break; case Event.SUSPECT: onSuspect((Address) evt.getArg()); break; case Event.SUSPEND: return startFlush(evt, 3, false); case Event.RESUME: onResume(); return null; } return up_prot.up(evt); } private void onFlushReconcileOK(Message msg) { if(log.isDebugEnabled()) log.debug(localAddress + " received reconcile ok from " + msg.getSrc()); synchronized(sharedLock){ reconcileOks.add(msg.getSrc()); if(reconcileOks.size() >= flushMembers.size()){ flush_promise.setResult(Boolean.TRUE); if(log.isDebugEnabled()) log.debug("All FLUSH_RECONCILE_OK received at " + localAddress); } } } private void handleFlushReconcile(Message msg, FlushHeader fh) { Address requester = msg.getSrc(); Digest reconcileDigest = fh.digest; if(log.isDebugEnabled()) log.debug("Received FLUSH_RECONCILE at " + localAddress + " passing digest to NAKACK " + reconcileDigest); // Let NAKACK reconcile missing messages down_prot.down(new Event(Event.REBROADCAST, reconcileDigest)); if(log.isDebugEnabled()) log.debug("Returned from FLUSH_RECONCILE at " + localAddress + " Sending RECONCILE_OK to " + requester + ", thread " + Thread.currentThread()); Message reconcileOk = new Message(requester); reconcileOk.setFlag(Message.OOB); reconcileOk.putHeader(getName(), new FlushHeader(FlushHeader.FLUSH_RECONCILE_OK)); down_prot.down(new Event(Event.MSG, reconcileOk)); } private void handleStartFlush(Message msg, FlushHeader fh) { Address coordinator = null; boolean proceed = false; Address flushRequester = msg.getSrc(); synchronized (sharedLock) { proceed = flushInProgress.compareAndSet(false, true); if(proceed){ flushCoordinator = flushRequester; }else{ if(flushCoordinator != null) coordinator = flushCoordinator; else coordinator = flushRequester; } } if(proceed){ sendBlockUpToChannel(); onStartFlush(flushRequester, fh); } else{ if(flushRequester.compareTo(coordinator) < 0){ rejectFlush(fh.viewID, coordinator); if(log.isDebugEnabled()){ log.debug("Rejecting flush at " + localAddress + " to current flush coordinator " + coordinator + " and switching flush coordinator to " + flushRequester); } onStartFlush(flushRequester, fh); }else if(flushRequester.compareTo(coordinator) > 0){ rejectFlush(fh.viewID, flushRequester); if(log.isDebugEnabled()){ log.debug("Rejecting flush at " + localAddress + " to flush requester " + flushRequester + " coordinator is " + coordinator); } onStartFlush(coordinator, fh); }else if(flushRequester.equals(coordinator)){ rejectFlush(fh.viewID, flushRequester); if(log.isDebugEnabled()){ log.debug("Rejecting flush at " + localAddress + ", 
previous flush has to finish first"); } } } } public Vector<Integer> providedDownServices() { Vector<Integer> retval = new Vector<Integer>(2); retval.addElement(new Integer(Event.SUSPEND)); retval.addElement(new Integer(Event.RESUME)); return retval; } private void rejectFlush(long viewId, Address flushRequester) { Message reject = new Message(flushRequester, localAddress, null); reject.putHeader(getName(), new FlushHeader(FlushHeader.ABORT_FLUSH, viewId)); down_prot.down(new Event(Event.MSG, reject)); } private void sendBlockUpToChannel() { up_prot.up(new Event(Event.BLOCK)); } private boolean isCurrentFlushMessage(FlushHeader fh) { return fh.viewID == currentViewId(); } private long currentViewId() { long viewId = -1; synchronized(sharedLock){ ViewId view = currentView.getVid(); if(view != null){ viewId = view.getId(); } } return viewId; } private boolean onViewChange(View view) { boolean amINewCoordinator = false; boolean isThisOurFirstView = false; synchronized(sharedLock){ if(receivedFirstView){ receivedMoreThanOneView = true; } if(!receivedFirstView){ receivedFirstView = true; } isThisOurFirstView = receivedFirstView && !receivedMoreThanOneView; suspected.retainAll(view.getMembers()); currentView = view; boolean coordinatorLeft = flushCoordinator != null && !view.containsMember(flushCoordinator); if(coordinatorLeft){ flushCoordinator = view.getMembers().get(0); amINewCoordinator = localAddress.equals(flushCoordinator); } } // If coordinator leaves, its STOP FLUSH message will be discarded by // other members at NAKACK layer. Remaining members will be hung, // waiting for STOP_FLUSH message. If I am new coordinator I will complete the // FLUSH and send STOP_FLUSH on flush callers behalf. if(amINewCoordinator){ if(log.isDebugEnabled()) log.debug("Coordinator left, " + localAddress + " will complete flush"); onResume(); } if(log.isDebugEnabled()) log.debug("Installing view at " + localAddress + " view is " + view); return isThisOurFirstView; } private void onStopFlush() { if(stats){ long stopFlushTime = System.currentTimeMillis(); totalTimeInFlush += (stopFlushTime - startFlushTime); if(numberOfFlushes > 0){ averageFlushDuration = totalTimeInFlush / (double) numberOfFlushes; } } boolean amISurvivingMember = false; synchronized(sharedLock){ amISurvivingMember = currentView.containsMember(localAddress); flushCompletedMap.clear(); stopFlushOkSet.clear(); flushMembers.clear(); suspected.clear(); flushCoordinator = null; allowMessagesToPassUp = true; } if(log.isDebugEnabled()) log.debug("At " + localAddress + " received STOP_FLUSH, unblocking FLUSH.down() and sending UNBLOCK up"); synchronized(blockMutex){ isBlockingFlushDown = false; blockMutex.notifyAll(); } if(amISurvivingMember){ up_prot.up(new Event(Event.UNBLOCK)); } flushInProgress.set(false); } private void onSuspend(View view) { Message msg = null; Collection<Address> participantsInFlush = null; synchronized(sharedLock){ // start FLUSH only on group members that we need to flush if(view != null){ participantsInFlush = new ArrayList<Address>(view.getMembers()); participantsInFlush.retainAll(currentView.getMembers()); }else{ participantsInFlush = new ArrayList<Address>(currentView.getMembers()); } msg = new Message(null, localAddress, null); msg.putHeader(getName(), new FlushHeader(FlushHeader.START_FLUSH, currentViewId(), participantsInFlush)); } if(participantsInFlush.isEmpty()){ flush_promise.setResult(Boolean.TRUE); }else{ down_prot.down(new Event(Event.MSG, msg)); if(log.isDebugEnabled()) log.debug("Flush coordinator " + 
localAddress + " is starting FLUSH with participants " + participantsInFlush); } } private void onResume() { long viewID = currentViewId(); Message msg = new Message(null, localAddress, null); //Cannot be OOB since START_FLUSH is not OOB //we have to FIFO order two subsequent flushes msg.putHeader(getName(), new FlushHeader(FlushHeader.STOP_FLUSH, viewID)); down_prot.down(new Event(Event.MSG, msg)); if(log.isDebugEnabled()) log.debug("Received RESUME at " + localAddress + ", sent STOP_FLUSH to all"); } private void onStartFlush(Address flushStarter, FlushHeader fh) { synchronized(blockMutex){ isBlockingFlushDown = true; } if(stats){ startFlushTime = System.currentTimeMillis(); numberOfFlushes += 1; } boolean amIParticipant = false; synchronized(sharedLock){ flushCoordinator = flushStarter; flushMembers.clear(); if(fh.flushParticipants != null){ flushMembers.addAll(fh.flushParticipants); } flushMembers.removeAll(suspected); amIParticipant = flushMembers.contains(localAddress); } if(amIParticipant){ Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST)); FlushHeader fhr = new FlushHeader(FlushHeader.FLUSH_COMPLETED, fh.viewID); fhr.addDigest(digest); Message msg = new Message(flushStarter); msg.putHeader(getName(), fhr); down_prot.down(new Event(Event.MSG, msg)); if(log.isDebugEnabled()) log.debug("Received START_FLUSH at " + localAddress + " responded with FLUSH_OK"); } } private void onFlushCompleted(Address address, Digest digest) { boolean flushCompleted = false; Message msg = null; boolean needsReconciliationPhase = false; synchronized(sharedLock){ flushCompletedMap.put(address, digest); if(flushCompletedMap.size() >= flushMembers.size()){ flushCompleted = flushCompletedMap.keySet().containsAll(flushMembers); } if(log.isDebugEnabled()) log.debug("At " + localAddress + " FLUSH_COMPLETED from " + address + ",completed " + flushCompleted + ",flushCompleted " + flushCompletedMap.keySet()); needsReconciliationPhase = enable_reconciliation && flushCompleted && hasVirtualSynchronyGaps(); if(needsReconciliationPhase){ Digest d = findHighestSequences(); msg = new Message(); msg.setFlag(Message.OOB); FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_RECONCILE, currentViewId(), flushMembers); reconcileOks.clear(); fh.addDigest(d); msg.putHeader(getName(), fh); if(log.isTraceEnabled()) log.trace("Reconciling flush mebers due to virtual synchrony gap, digest is " + d + " flush members are " + flushMembers); flushCompletedMap.clear(); } } if(needsReconciliationPhase){ down_prot.down(new Event(Event.MSG, msg)); }else if(flushCompleted){ flush_promise.setResult(Boolean.TRUE); if(log.isDebugEnabled()) log.debug("All FLUSH_COMPLETED received at " + localAddress); } } private boolean hasVirtualSynchronyGaps() { ArrayList<Digest> digests = new ArrayList<Digest>(); digests.addAll(flushCompletedMap.values()); Digest firstDigest = digests.get(0); List<Digest> remainingDigests = digests.subList(1, digests.size()); for(Digest digest:remainingDigests){ Digest diff = firstDigest.difference(digest); if(diff != Digest.EMPTY_DIGEST){ return true; } } return false; } private Digest findHighestSequences() { Digest result = null; List<Digest> digests = new ArrayList<Digest>(flushCompletedMap.values()); result = digests.get(0); List<Digest> remainingDigests = digests.subList(1, digests.size()); for(Digest digestG:remainingDigests){ result = result.highestSequence(digestG); } return result; } private void onSuspect(Address address) { boolean flushOkCompleted = false; Message m = null; long viewID = 0; 
synchronized(sharedLock){ suspected.add(address); flushMembers.removeAll(suspected); viewID = currentViewId(); flushOkCompleted = !flushCompletedMap.isEmpty() && flushCompletedMap.keySet().containsAll(flushMembers); if(flushOkCompleted){ m = new Message(flushCoordinator, localAddress, null); } if(log.isDebugEnabled()) log.debug("Suspect is " + address + ",completed " + flushOkCompleted + ", flushOkSet " + flushCompletedMap + " flushMembers " + flushMembers); } if(flushOkCompleted){ Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST)); FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_COMPLETED, viewID); fh.addDigest(digest); m.putHeader(getName(), fh); down_prot.down(new Event(Event.MSG, m)); if(log.isDebugEnabled()) log.debug(localAddress + " sent FLUSH_COMPLETED message to " + flushCoordinator); } } public static class FlushHeader extends Header implements Streamable { public static final byte START_FLUSH = 0; public static final byte STOP_FLUSH = 2; public static final byte FLUSH_COMPLETED = 3; public static final byte ABORT_FLUSH = 5; public static final byte FLUSH_BYPASS = 6; public static final byte FLUSH_RECONCILE = 7; public static final byte FLUSH_RECONCILE_OK = 8; byte type; long viewID; Collection<Address> flushParticipants; Digest digest = null; private static final long serialVersionUID=-6248843990215637687L; public FlushHeader(){ this(START_FLUSH, 0); } // used for externalization public FlushHeader(byte type){ this(type, 0); } public FlushHeader(byte type,long viewID){ this(type, viewID, null); } public FlushHeader(byte type,long viewID,Collection<Address> flushView){ this.type = type; this.viewID = viewID; this.flushParticipants = flushView; } public void addDigest(Digest digest) { this.digest = digest; } public String toString() { switch(type){ case START_FLUSH: return "FLUSH[type=START_FLUSH,viewId=" + viewID + ",members=" + flushParticipants + "]"; case STOP_FLUSH: return "FLUSH[type=STOP_FLUSH,viewId=" + viewID + "]"; case ABORT_FLUSH: return "FLUSH[type=ABORT_FLUSH,viewId=" + viewID + "]"; case FLUSH_COMPLETED: return "FLUSH[type=FLUSH_COMPLETED,viewId=" + viewID + "]"; case FLUSH_BYPASS: return "FLUSH[type=FLUSH_BYPASS,viewId=" + viewID + "]"; case FLUSH_RECONCILE: return "FLUSH[type=FLUSH_RECONCILE,viewId=" + viewID + ",digest=" + digest + "]"; case FLUSH_RECONCILE_OK: return "FLUSH[type=FLUSH_RECONCILE_OK,viewId=" + viewID + "]"; default: return "[FLUSH: unknown type (" + type + ")]"; } } public void writeExternal(ObjectOutput out) throws IOException { out.writeByte(type); out.writeLong(viewID); out.writeObject(flushParticipants); out.writeObject(digest); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { type = in.readByte(); viewID = in.readLong(); flushParticipants = (Collection) in.readObject(); digest = (Digest) in.readObject(); } public void writeTo(DataOutputStream out) throws IOException { out.writeByte(type); out.writeLong(viewID); if(flushParticipants != null && !flushParticipants.isEmpty()){ out.writeShort(flushParticipants.size()); for(Iterator<Address> iter = flushParticipants.iterator();iter.hasNext();){ Address address = iter.next(); Util.writeAddress(address, out); } }else{ out.writeShort(0); } if(digest != null){ out.writeBoolean(true); Util.writeStreamable(digest, out); }else{ out.writeBoolean(false); } } public void readFrom(DataInputStream in) throws IOException, IllegalAccessException, InstantiationException { type = in.readByte(); viewID = in.readLong(); int flushParticipantsSize = 
in.readShort(); if(flushParticipantsSize > 0){ flushParticipants = new ArrayList<Address>(flushParticipantsSize); for(int i = 0;i < flushParticipantsSize;i++){ flushParticipants.add(Util.readAddress(in)); } } boolean hasDigest = in.readBoolean(); if(hasDigest){ digest = (Digest) Util.readStreamable(Digest.class, in); } } } }
package org.lightmare.deploy.fs; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent; import java.nio.file.WatchEvent.Kind; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.config.Configuration; import org.lightmare.jpa.datasource.FileParsers; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.LogUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.concurrent.ThreadFactoryUtil; import org.lightmare.utils.fs.WatchUtils; /** * Deployment manager, {@link Watcher#deployFile(URL)}, * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and * {@link File} modification event handler for deployments if java version is * 1.7 or above * * @author levan * */ public class Watcher implements Runnable { private static final String DEPLOY_THREAD_NAME = "watch_thread"; private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5; private static final long SLEEP_TIME = 5500L; private static final ExecutorService DEPLOY_POOL = Executors .newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME, DEPLOY_POOL_PRIORITY)); private Set<DeploymentDirectory> deployments; private Set<String> dataSources; private static final Logger LOG = Logger.getLogger(Watcher.class); /** * Defines file types for watch service * * @author Levan * */ private static enum WatchFileType { DATA_SOURCE, DEPLOYMENT, NONE; } /** * To filter only deployed sub files from directory * * @author levan * */ private static class DeployFiletr implements FileFilter { @Override public boolean accept(File file) { boolean accept; try { URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); accept = MetaContainer.chackDeployment(url); } catch (MalformedURLException ex) { LOG.error(ex.getMessage(), ex); accept = false; } catch (IOException ex) { LOG.error(ex.getMessage(), ex); accept = false; } return accept; } } private Watcher() { deployments = getDeployDirectories(); dataSources = getDataSourcePaths(); } private static URL getAppropriateURL(String fileName) throws IOException { File file = new File(fileName); URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); return url; } private static Set<DeploymentDirectory> getDeployDirectories() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (config.isWatchStatus() && CollectionUtils.available(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } return deploymetDirss; } private static Set<String> getDataSourcePaths() { 
Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (config.isWatchStatus() && CollectionUtils.available(pathsCurrent)) { paths.addAll(pathsCurrent); } } return paths; } private static WatchFileType checkType(String fileName) { WatchFileType type; File file = new File(fileName); String path = file.getPath(); String filePath = WatchUtils.clearPath(path); path = file.getParent(); String parentPath = WatchUtils.clearPath(path); Set<DeploymentDirectory> apps = getDeployDirectories(); Set<String> dss = getDataSourcePaths(); if (CollectionUtils.available(apps)) { String deploymantPath; Iterator<DeploymentDirectory> iterator = apps.iterator(); boolean notDeployment = Boolean.TRUE; DeploymentDirectory deployment; while (iterator.hasNext() && notDeployment) { deployment = iterator.next(); deploymantPath = deployment.getPath(); notDeployment = ObjectUtils.notEquals(deploymantPath, parentPath); } if (notDeployment) { type = WatchFileType.NONE; } else { type = WatchFileType.DEPLOYMENT; } } else if (CollectionUtils.available(dss) && dss.contains(filePath)) { type = WatchFileType.DATA_SOURCE; } else { type = WatchFileType.NONE; } return type; } private static void fillFileList(File[] files, List<File> list) { if (CollectionUtils.available(files)) { for (File file : files) { list.add(file); } } } /** * Lists all deployed {@link File}s * * @return {@link List}<File> */ public static List<File> listDeployments() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (CollectionUtils.available(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } File[] files; List<File> list = new ArrayList<File>(); if (CollectionUtils.available(deploymetDirss)) { String path; DeployFiletr filter = new DeployFiletr(); for (DeploymentDirectory deployment : deploymetDirss) { path = deployment.getPath(); files = new File(path).listFiles(filter); fillFileList(files, list); } } return list; } /** * Lists all data source {@link File}s * * @return {@link List}<File> */ public static List<File> listDataSources() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (CollectionUtils.available(pathsCurrent)) { paths.addAll(pathsCurrent); } } File file; List<File> list = new ArrayList<File>(); if (CollectionUtils.available(paths)) { for (String path : paths) { file = new File(path); list.add(file); } } return list; } public static void deployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { FileParsers fileParsers = new FileParsers(); fileParsers.parseStandaloneXml(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); deployFile(url); } } public static void deployFile(URL url) throws IOException { URL[] archives = { url }; MetaContainer.getCreator().scanForBeans(archives); } public static void undeployFile(URL url) throws IOException { boolean valid = MetaContainer.undeploy(url); if (valid && RestContainer.hasRest()) { 
RestProvider.reload(); } } public static void undeployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { Initializer.undeploy(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); undeployFile(url); } } public static void redeployFile(String fileName) throws IOException { undeployFile(fileName); deployFile(fileName); } private void handleEvent(Path dir, WatchEvent<Path> currentEvent) throws IOException { if (currentEvent == null) { return; } Path prePath = currentEvent.context(); Path path = dir.resolve(prePath); String fileName = path.toString(); int count = currentEvent.count(); Kind<?> kind = currentEvent.kind(); if (kind == StandardWatchEventKinds.ENTRY_MODIFY) { LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count); redeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) { LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count); undeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) { LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count); redeployFile(fileName); } } private void runService(WatchService watch) throws IOException { Path dir; boolean toRun = true; boolean valid; while (toRun) { try { WatchKey key; key = watch.take(); List<WatchEvent<?>> events = key.pollEvents(); WatchEvent<?> currentEvent = null; WatchEvent<Path> typedCurrentEvent; int times = 0; dir = (Path) key.watchable(); for (WatchEvent<?> event : events) { if (event.kind() == StandardWatchEventKinds.OVERFLOW) { continue; } if (times == 0 || event.count() > currentEvent.count()) { currentEvent = event; } times++; valid = key.reset(); toRun = valid && key.isValid(); if (toRun) { Thread.sleep(SLEEP_TIME); typedCurrentEvent = ObjectUtils.cast(currentEvent); handleEvent(dir, typedCurrentEvent); } } } catch (InterruptedException ex) { throw new IOException(ex); } } } private void registerPath(FileSystem fs, String path, WatchService watch) throws IOException { Path deployPath = fs.getPath(path); deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.OVERFLOW, StandardWatchEventKinds.ENTRY_DELETE); runService(watch); } private void registerPaths(File[] files, FileSystem fs, WatchService watch) throws IOException { String path; for (File file : files) { path = file.getPath(); registerPath(fs, path, watch); } } private void registerPaths(Collection<DeploymentDirectory> deploymentDirss, FileSystem fs, WatchService watch) throws IOException { String path; boolean scan; File directory; File[] files; for (DeploymentDirectory deployment : deploymentDirss) { path = deployment.getPath(); scan = deployment.isScan(); if (scan) { directory = new File(path); files = directory.listFiles(); if (ObjectUtils.available(files)) { registerPaths(files, fs, watch); } } else { registerPath(fs, path, watch); } } } private void registerDsPaths(Collection<String> paths, FileSystem fs, WatchService watch) throws IOException { for (String path : paths) { registerPath(fs, path, watch); } } @Override public void run() { try { FileSystem fs = FileSystems.getDefault(); WatchService watch = null; try { watch = fs.newWatchService(); } catch (IOException ex) { LOG.error(ex.getMessage(), ex); throw ex; } if (ObjectUtils.available(deployments)) { registerPaths(deployments, fs, watch); } if (ObjectUtils.available(dataSources)) { registerDsPaths(dataSources, fs, watch); } } catch 
(IOException ex) { LOG.fatal(ex.getMessage(), ex); LOG.fatal("system going to shut down cause of hot deployment"); try { ConnectionContainer.closeConnections(); } catch (IOException iex) { LOG.fatal(iex.getMessage(), iex); } System.exit(-1); } finally { DEPLOY_POOL.shutdown(); } } public static void startWatch() { Watcher watcher = new Watcher(); DEPLOY_POOL.submit(watcher); } }
package org.intermine.webservice.server.output; import java.util.List; import java.util.Map; /** * Simplest possible formatting. Output rows are just joined with commas. * @author Alex Kalderimis * */ public class PlainFormatter extends Formatter { @Override public String formatHeader(Map<String, Object> attributes) { return ""; } @Override public String formatResult(List<String> resultRow) { StringBuilder sb = new StringBuilder(); boolean needsComma = false; for (String item: resultRow) { /* the separator precedes every item after the first, so the row is joined as "a,b,c" */ if (needsComma) { sb.append(","); } sb.append(item); needsComma = true; } return sb.toString(); } @Override public String formatFooter(String errorMessage, int errorCode) { return ""; } }
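// A minimal usage sketch for PlainFormatter, assuming the abstract Formatter base class
// permits plain no-argument construction as above; class and value names here are
// illustrative only, not part of the original web-service wiring.
class PlainFormatterDemo {
    public static void main(String[] args) {
        PlainFormatter formatter = new PlainFormatter();
        java.util.List<String> row = java.util.Arrays.asList("gene", "FBgn0000008", "2R");
        // Items are joined with commas and no trailing separator: "gene,FBgn0000008,2R"
        System.out.println(formatter.formatResult(row));
    }
}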
package xdroid.core; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; import android.os.Message; import android.os.Process; import java.util.concurrent.atomic.AtomicInteger; /** * @author Oleksii Kropachov (o.kropachov@shamanland.com) */ public final class ThreadUtils implements Runnable { private static AtomicInteger sCounter; private static Runnable sStopper; private ThreadUtils() { // disallow public access } private static String newName() { AtomicInteger counter = sCounter; if (counter == null) { counter = new AtomicInteger(); sCounter = counter; } return ThreadUtils.class.getSimpleName() + '-' + counter.incrementAndGet(); } /** * @see #newThread(String, int, Handler.Callback) */ public static Handler newThread(Handler.Callback callback) { return newThread(null, Process.THREAD_PRIORITY_LOWEST, callback); } /** * @see #newThread(String, int, Handler.Callback) */ public static Handler newThread(String name, Handler.Callback callback) { return newThread(name, Process.THREAD_PRIORITY_LOWEST, callback); } /** * Creates new {@link HandlerThread} and returns new {@link Handler} associated with this thread. * * @param name name of thread, in case of null - the default name will be generated * @param priority one of constants from {@link android.os.Process} * @param callback message handling callback, may be null * @return new instance */ public static Handler newThread(String name, int priority, Handler.Callback callback) { HandlerThread thread = new HandlerThread(name != null ? name : newName(), priority); thread.start(); return new Handler(thread.getLooper(), callback); } /** * Creates new {@link Handler} with the same {@link Looper} as the original handler. * * @param original original handler, can not be null * @param callback message handling callback, may be null * @return new instance */ public static Handler newHandler(Handler original, Handler.Callback callback) { return new Handler(original.getLooper(), callback); } /** * @see #stopThread(Handler, boolean) */ public static void stopThread(Handler handler) { stopThread(handler, true); } /** * Post the {@link Runnable} instance with the following code to the {@link Handler} provided: * <pre> * public void run() { * Looper.myLooper().quit(); * } * </pre> * * @param handler target handler, can not be null * @param asap if true then method {@link Handler#postAtFrontOfQueue(Runnable)} will be used. */ public static void stopThread(Handler handler, boolean asap) { Runnable stopper = sStopper; if (stopper == null) { stopper = new ThreadUtils(); sStopper = stopper; } if (asap) { handler.postAtFrontOfQueue(stopper); } else { handler.post(stopper); } } @Override public void run() { Looper.myLooper().quit(); } public static class ObjAsRunnableCallback implements Handler.Callback { public static final ObjAsRunnableCallback INSTANCE = new ObjAsRunnableCallback(); private ObjAsRunnableCallback() { // disallow public access } @Override public boolean handleMessage(Message message) { if (message.obj instanceof Runnable) { ((Runnable) message.obj).run(); return true; } return false; } } }
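// A minimal usage sketch for the helpers above, assuming an Android runtime; the thread
// name and the posted work are illustrative only.
class ThreadUtilsDemo {
    static void runInBackground() {
        // Create a low-priority background thread. ObjAsRunnableCallback makes the thread
        // execute any Runnable passed as Message.obj, though plain post(...) also works.
        android.os.Handler worker = ThreadUtils.newThread("demo-worker",
                ThreadUtils.ObjAsRunnableCallback.INSTANCE);
        worker.post(new Runnable() {
            @Override
            public void run() {
                // ... background work goes here ...
            }
        });
        // Quit the backing Looper after the queued work has run (asap = false uses post()).
        ThreadUtils.stopThread(worker, false);
    }
}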
package niagaraGUI; import java.util.*; import org.jdom.*; import java.io.FileWriter; import java.io.IOException; import org.jdom.Attribute; import org.jdom.Document; import org.jdom.Element; import org.jdom.output.Format; import org.jdom.output.XMLOutputter; public class QueryPlan { private String filename; static private Hashtable<String, OperatorTemplate> opTemplates;//Table of operator templates indexed by operator name private List<Operator> opList;//List of operator Instances in the current query plan private Operator top;//reference to the top operator private String queryName;//name of the query private DTDInterpreter dtdInterp; public QueryPlan(String name, String filename) { opTemplates = new Hashtable<String, OperatorTemplate>(); dtdInterp = new DTDInterpreter(filename); opTemplates = dtdInterp.getTemplates(); opList = new ArrayList<Operator>(); } static public Hashtable<String, OperatorTemplate> getOpTemplates() { return opTemplates; } static public OperatorTemplate addTemplate(OperatorTemplate opTemplate) { return opTemplates.put(opTemplate.getName(), opTemplate); } public void generateXML(String filename) { try{ String name; String elements; String comments; Element plan = new Element("plan"); plan.setAttribute(new Attribute("top", "cons")); Document doc1 = new Document(plan); DocType type = new DocType("plan", "/stash/datalab/datastreams-student/bin/queryplan.dtd"); doc1.setDocType(type); doc1.setRootElement(plan); Iterator iterator; iterator = opList.iterator(); Operator op; HashMap<String, String> att; while (iterator.hasNext()){ op = (Operator)iterator.next(); name = op.getName(); comments = op.getComments(); elements = op.getElements(); Element ele = new Element(name); att = op.getAttributes(); Set set = att.entrySet(); Iterator i = set.iterator(); String str1; String str2; while(i.hasNext()) { Map.Entry me = (Map.Entry)i.next(); str1 = (String)me.getKey(); str2 = (String)me.getValue(); if(str2 != null) ele.setAttribute(new Attribute(str1,str2)); } if(elements != null) ele.addContent("\n" + elements + "\n"); if(comments != null) ele.addContent("//" + comments + "\n"); doc1.getRootElement().addContent(ele); } XMLOutputter xmlOutput = new XMLOutputter(); xmlOutput.setFormat(Format.getRawFormat()); xmlOutput.output(doc1, new FileWriter(filename)); System.out.println("File Saved!"); }catch (IOException io) { System.out.println(io.getMessage()); } } public String[] getOperatorNames(){ if (opTemplates != null){ Set<String> opNameSet = opTemplates.keySet(); String[] opNameAry = new String[opNameSet.size()]; opNameAry = opNameSet.toArray(opNameAry); return opNameAry; } else return null; } public void setName(String name) { queryName = name; } public String getName() { //returns the name of this query plan return queryName; } public boolean addOperatorInstance(Operator newOp){ //Adds a new instantiated operator to this query plan if (opList.contains(newOp)){ return false; } else{ opList.add(newOp); return true; } } public boolean removeOperatorInstance(Operator toRemove){ //removes an instantiated Operator from this query plan if (opList.contains(toRemove)){ opList.remove(toRemove); return true; } else{ return false; } } // This design pattern is in place to ease future import if // additional types are added public Boolean parse(String filename) { return parseDTD(filename); } public Boolean parse(String filename, String docType) { if(docType == null) { docType = "DTD"; } if("DTD".equals(docType)) { return parseDTD(filename); } else return false; } private Boolean parseDTD(String
filename) { return false; } public String toString() { return null; } public void setTop(Operator newTop){ if (top != null) top.setTop(false); top = newTop; top.setTop(true); } public Operator getTop(){ return top; } }
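// A minimal usage sketch for the query-plan API above; the DTD path, query name, output
// file and the Operator instance are placeholders supplied elsewhere by the GUI.
class QueryPlanExample {
    static void buildAndSave(Operator consOperator) {
        QueryPlan plan = new QueryPlan("myQuery", "/path/to/queryplan.dtd");
        plan.setName("myQuery");
        plan.addOperatorInstance(consOperator); // register the operator instance
        plan.setTop(consOperator);              // mark it as the top of the plan
        plan.generateXML("/tmp/myQuery.xml");   // serialize the plan to an XML file
    }
}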
package io.jchat.android.activity; import android.app.Dialog; import android.app.NotificationManager; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.MediaStore; import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Toast; import java.io.File; import cn.jpush.im.android.api.JMessageClient; import cn.jpush.im.android.api.callback.GetAvatarBitmapCallback; import cn.jpush.im.android.api.model.UserInfo; import io.jchat.android.R; import io.jchat.android.application.JChatDemoApplication; import io.jchat.android.controller.MeController; import io.jchat.android.tools.BitmapLoader; import io.jchat.android.tools.DialogCreator; import io.jchat.android.tools.FileHelper; import io.jchat.android.tools.HandleResponseCode; import io.jchat.android.view.MeView; public class MeFragment extends BaseFragment { private static final String TAG = MeFragment.class.getSimpleName(); private View mRootView; private MeView mMeView; private MeController mMeController; private Context mContext; private String mPath; private boolean mIsShowAvatar = false; private boolean mIsGetAvatar = false; @Override public void onCreate(Bundle savedInstanceState) { // TODO Auto-generated method stub super.onCreate(savedInstanceState); mContext = this.getActivity(); LayoutInflater layoutInflater = getActivity().getLayoutInflater(); mRootView = layoutInflater.inflate(R.layout.fragment_me, (ViewGroup) getActivity().findViewById(R.id.main_view), false); mMeView = (MeView) mRootView.findViewById(R.id.me_view); mMeView.initModule(mDensity, mWidth); mMeController = new MeController(mMeView, this, mWidth); mMeView.setListeners(mMeController); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // TODO Auto-generated method stub ViewGroup p = (ViewGroup) mRootView.getParent(); if (p != null) { p.removeAllViewsInLayout(); } return mRootView; } @Override public void onResume() { if (!mIsShowAvatar) { UserInfo myInfo = JMessageClient.getMyInfo(); if (!TextUtils.isEmpty(myInfo.getAvatar())) { myInfo.getAvatarBitmap(new GetAvatarBitmapCallback() { @Override public void gotResult(int status, String desc, Bitmap bitmap) { if (status == 0) { mMeView.showPhoto(bitmap); mIsShowAvatar = true; } else { HandleResponseCode.onHandle(mContext, status, false); } } }); } mMeView.showNickName(myInfo.getNickname()); } super.onResume(); } @Override public void onDestroy() { super.onDestroy(); } public void Logout() { // TODO Auto-generated method stub final Intent intent = new Intent(); UserInfo info = JMessageClient.getMyInfo(); if (null != info) { intent.putExtra("userName", info.getUserName()); File file = info.getAvatarFile(); if (file != null && file.isFile()) { intent.putExtra("avatarFilePath", file.getAbsolutePath()); } else { String path = FileHelper.getUserAvatarPath(info.getUserName()); file = new File(path); if (file.exists()) { intent.putExtra("avatarFilePath", file.getAbsolutePath()); } } JMessageClient.logout(); intent.setClass(mContext, ReloginActivity.class); startActivity(intent); } else { Log.d(TAG, "user info is null!"); } } public void StartSettingActivity() { Intent intent = new Intent(); intent.setClass(this.getActivity(), SettingActivity.class); startActivity(intent); } 
public void startMeInfoActivity() { Intent intent = new Intent(); intent.setClass(this.getActivity(), MeInfoActivity.class); startActivityForResult(intent, JChatDemoApplication.REQUEST_CODE_ME_INFO); } public void cancelNotification() { NotificationManager manager = (NotificationManager) this.getActivity().getApplicationContext() .getSystemService(Context.NOTIFICATION_SERVICE); manager.cancelAll(); } public void takePhoto() { if (FileHelper.isSdCardExist()) { mPath = FileHelper.createAvatarPath(JMessageClient.getMyInfo().getUserName()); Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(new File(mPath))); try { getActivity().startActivityForResult(intent, JChatDemoApplication.REQUEST_CODE_TAKE_PHOTO); } catch (ActivityNotFoundException anf) { Toast.makeText(this.getActivity(), mContext.getString(R.string.camera_not_prepared), Toast.LENGTH_SHORT).show(); } } else { Toast.makeText(this.getActivity(), mContext.getString(R.string.sdcard_not_exist_toast), Toast.LENGTH_SHORT).show(); } } public String getPhotoPath() { return mPath; } public void selectImageFromLocal() { if (FileHelper.isSdCardExist()) { Intent intent; if (Build.VERSION.SDK_INT < 19) { intent = new Intent(Intent.ACTION_GET_CONTENT);
package org.lightmare.deploy.fs; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent; import java.nio.file.WatchEvent.Kind; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.config.Configuration; import org.lightmare.jpa.datasource.FileParsers; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.LogUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.concurrent.ThreadFactoryUtil; import org.lightmare.utils.fs.WatchUtils; /** * Deployment manager, {@link Watcher#deployFile(URL)}, * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and * {@link File} modification event handler for deployments if java version is * 1.7 or above * * @author levan * @since 0.0.45-SNAPSHOT */ public class Watcher implements Runnable { private static final String DEPLOY_THREAD_NAME = "watch_thread"; private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5; private static final long SLEEP_TIME = 5500L; private static final ExecutorService DEPLOY_POOL = Executors .newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME, DEPLOY_POOL_PRIORITY)); private Set<DeploymentDirectory> deployments; private Set<String> dataSources; private static final Logger LOG = Logger.getLogger(Watcher.class); /** * Defines file types for watch service * * @author Levan * @since 0.0.45-SNAPSHOT */ private static enum WatchFileType { DATA_SOURCE, DEPLOYMENT, NONE; } /** * To filter only deployed sub files from directory * * @author levan * @since 0.0.45-SNAPSHOT */ private static class DeployFiletr implements FileFilter { @Override public boolean accept(File file) { boolean accept; try { URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); accept = MetaContainer.chackDeployment(url); } catch (MalformedURLException ex) { LOG.error(ex.getMessage(), ex); accept = false; } catch (IOException ex) { LOG.error(ex.getMessage(), ex); accept = false; } return accept; } } private Watcher() { deployments = getDeployDirectories(); dataSources = getDataSourcePaths(); } /** * Clears and gets file {@link URL} by file name * * @param fileName * @return {@link URL} * @throws IOException */ private static URL getAppropriateURL(String fileName) throws IOException { File file = new File(fileName); URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); return url; } /** * Gets {@link Set} of {@link DeploymentDirectory} instances from * configuration * * @return {@link Set}<code><DeploymentDirectory></code> */ private static Set<DeploymentDirectory> getDeployDirectories() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); 
Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (config.isWatchStatus() && CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } return deploymetDirss; } /** * Gets {@link Set} of data source paths from configuration * * @return {@link Set}<code><String></code> */ private static Set<String> getDataSourcePaths() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (config.isWatchStatus() && CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } return paths; } /** * Checks and gets appropriated {@link WatchFileType} by passed file name * * @param fileName * @return {@link WatchFileType} */ private static WatchFileType checkType(String fileName) { WatchFileType type; File file = new File(fileName); String path = file.getPath(); String filePath = WatchUtils.clearPath(path); path = file.getParent(); String parentPath = WatchUtils.clearPath(path); Set<DeploymentDirectory> apps = getDeployDirectories(); Set<String> dss = getDataSourcePaths(); if (CollectionUtils.valid(apps)) { String deploymantPath; Iterator<DeploymentDirectory> iterator = apps.iterator(); boolean notDeployment = Boolean.TRUE; DeploymentDirectory deployment; while (iterator.hasNext() && notDeployment) { deployment = iterator.next(); deploymantPath = deployment.getPath(); notDeployment = ObjectUtils.notEquals(deploymantPath, parentPath); } if (notDeployment) { type = WatchFileType.NONE; } else { type = WatchFileType.DEPLOYMENT; } } else if (CollectionUtils.valid(dss) && dss.contains(filePath)) { type = WatchFileType.DATA_SOURCE; } else { type = WatchFileType.NONE; } return type; } private static void fillFileList(File[] files, List<File> list) { if (CollectionUtils.valid(files)) { for (File file : files) { list.add(file); } } } /** * Lists all deployed {@link File}s * * @return {@link List}<File> */ public static List<File> listDeployments() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } File[] files; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(deploymetDirss)) { String path; DeployFiletr filter = new DeployFiletr(); for (DeploymentDirectory deployment : deploymetDirss) { path = deployment.getPath(); files = new File(path).listFiles(filter); fillFileList(files, list); } } return list; } /** * Lists all data source {@link File}s * * @return {@link List}<File> */ public static List<File> listDataSources() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } File file; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(paths)) { for (String path : paths) { file = new File(path); list.add(file); } } return list; } /** * Deploys application or data source file by passed file name * * @param fileName * 
@throws IOException */ public static void deployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { FileParsers fileParsers = new FileParsers(); fileParsers.parseStandaloneXml(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); deployFile(url); } } /** * Deploys application or data source file by passed {@link URL} instance * * @param url * @throws IOException */ public static void deployFile(URL url) throws IOException { URL[] archives = { url }; MetaContainer.getCreator().scanForBeans(archives); } /** * Removes from deployments application or data source file by passed * {@link URL} instance * * @param url * @throws IOException */ public static void undeployFile(URL url) throws IOException { boolean valid = MetaContainer.undeploy(url); if (valid && RestContainer.hasRest()) { RestProvider.reload(); } } /** * Removes from deployments application or data source file by passed file * name * * @param fileName * @throws IOException */ public static void undeployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { Initializer.undeploy(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); undeployFile(url); } } /** * Removes from deployments and deploys again application or data source * file by passed file name * * @param fileName * @throws IOException */ public static void redeployFile(String fileName) throws IOException { undeployFile(fileName); deployFile(fileName); } /** * Handles file change event * * @param dir * @param currentEvent * @throws IOException */ private void handleEvent(Path dir, WatchEvent<Path> currentEvent) throws IOException { if (ObjectUtils.notNull(currentEvent)) { Path prePath = currentEvent.context(); Path path = dir.resolve(prePath); String fileName = path.toString(); int count = currentEvent.count(); Kind<?> kind = currentEvent.kind(); if (kind == StandardWatchEventKinds.ENTRY_MODIFY) { LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count); redeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) { LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count); undeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) { LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count); redeployFile(fileName); } } } /** * Runs file watch service * * @param watch * @throws IOException */ private void runService(WatchService watch) throws IOException { Path dir; boolean toRun = true; boolean valid; while (toRun) { try { WatchKey key; key = watch.take(); List<WatchEvent<?>> events = key.pollEvents(); WatchEvent<?> currentEvent = null; WatchEvent<Path> typedCurrentEvent; int times = 0; dir = (Path) key.watchable(); for (WatchEvent<?> event : events) { if (event.kind() == StandardWatchEventKinds.OVERFLOW) { continue; } if (times == 0 || event.count() > currentEvent.count()) { currentEvent = event; } times++; valid = key.reset(); toRun = valid && key.isValid(); if (toRun) { Thread.sleep(SLEEP_TIME); typedCurrentEvent = ObjectUtils.cast(currentEvent); handleEvent(dir, typedCurrentEvent); } } } catch (InterruptedException ex) { throw new IOException(ex); } } } /** * * @param fs * @param path * @param watch * @throws IOException */ private void registerPath(FileSystem fs, String path, WatchService watch) throws IOException { Path deployPath = fs.getPath(path); 
deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.OVERFLOW, StandardWatchEventKinds.ENTRY_DELETE); runService(watch); } /** * Registers passed {@link File} array to watch service * * @param files * @param fs * @param watch * @throws IOException */ private void registerPaths(File[] files, FileSystem fs, WatchService watch) throws IOException { String path; for (File file : files) { path = file.getPath(); registerPath(fs, path, watch); } } /** * Registers deployments directories to watch service * * @param deploymentDirss * @param fs * @param watch * @throws IOException */ private void registerPaths(Collection<DeploymentDirectory> deploymentDirss, FileSystem fs, WatchService watch) throws IOException { String path; boolean scan; File directory; File[] files; for (DeploymentDirectory deployment : deploymentDirss) { path = deployment.getPath(); scan = deployment.isScan(); if (scan) { directory = new File(path); files = directory.listFiles(); if (CollectionUtils.valid(files)) { registerPaths(files, fs, watch); } } else { registerPath(fs, path, watch); } } } /** * Registers data source path to watch service * * @param paths * @param fs * @param watch * @throws IOException */ private void registerDsPaths(Collection<String> paths, FileSystem fs, WatchService watch) throws IOException { for (String path : paths) { registerPath(fs, path, watch); } } @Override public void run() { try { FileSystem fs = FileSystems.getDefault(); WatchService watch = null; try { watch = fs.newWatchService(); } catch (IOException ex) { LOG.error(ex.getMessage(), ex); throw ex; } if (CollectionUtils.valid(deployments)) { registerPaths(deployments, fs, watch); } if (CollectionUtils.valid(dataSources)) { registerDsPaths(dataSources, fs, watch); } } catch (IOException ex) { LOG.fatal(ex.getMessage(), ex); LOG.fatal("system going to shut down cause of hot deployment"); try { ConnectionContainer.closeConnections(); } catch (IOException iex) { LOG.fatal(iex.getMessage(), iex); } System.exit(-1); } finally { DEPLOY_POOL.shutdown(); } } /** * Starts watch service for application and data source files */ public static void startWatch() { Watcher watcher = new Watcher(); DEPLOY_POOL.submit(watcher); } }
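// A minimal usage sketch for the watch service above, assuming the MetaContainer
// configuration (deployment directories and data source paths) has already been loaded.
class WatcherDemo {
    static void startAndReport() {
        // Submit the watcher to its single-thread pool; it registers the configured paths
        // and then blocks inside the WatchService loop.
        Watcher.startWatch();
        // Report what is currently visible to the container.
        for (java.io.File deployed : Watcher.listDeployments()) {
            System.out.println("deployment: " + deployed.getAbsolutePath());
        }
        for (java.io.File ds : Watcher.listDataSources()) {
            System.out.println("data source: " + ds.getAbsolutePath());
        }
    }
}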
package com.jme3.app; import android.app.Activity; import android.app.AlertDialog; import android.content.DialogInterface; import android.content.pm.ActivityInfo; import android.graphics.drawable.Drawable; import android.graphics.drawable.NinePatchDrawable; import android.os.Bundle; import android.util.Log; import android.view.*; import android.view.ViewGroup.LayoutParams; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; import com.jme3.audio.AudioRenderer; import com.jme3.audio.android.AndroidAudioRenderer; import com.jme3.input.JoyInput; import com.jme3.input.TouchInput; import com.jme3.input.android.AndroidSensorJoyInput; import com.jme3.input.controls.TouchListener; import com.jme3.input.controls.TouchTrigger; import com.jme3.input.event.TouchEvent; import com.jme3.renderer.android.AndroidGLSurfaceView; import com.jme3.system.AppSettings; import com.jme3.system.SystemListener; import com.jme3.system.android.AndroidConfigChooser; import com.jme3.system.android.AndroidConfigChooser.ConfigType; import com.jme3.system.android.JmeAndroidSystem; import com.jme3.system.android.OGLESContext; import com.jme3.util.AndroidLogHandler; import java.io.PrintWriter; import java.io.StringWriter; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.Logger; /** * <code>AndroidHarness</code> wraps a jme application object and runs it on * Android * * @author Kirill * @author larynx */ public class AndroidHarness extends Activity implements TouchListener, DialogInterface.OnClickListener, SystemListener { protected final static Logger logger = Logger.getLogger(AndroidHarness.class.getName()); /** * The application class to start */ protected String appClass = "jme3test.android.Test"; /** * The jme3 application object */ protected Application app = null; /** * ConfigType.FASTEST is RGB565, GLSurfaceView default ConfigType.BEST is * RGBA8888 or better if supported by the hardware */ protected ConfigType eglConfigType = ConfigType.FASTEST; /** * If true all valid and not valid egl configs are logged * @deprecated this has no use */ @Deprecated protected boolean eglConfigVerboseLogging = false; /** * set to 2, 4 to enable multisampling. */ protected int antiAliasingSamples = 0; /** * Sets the type of Audio Renderer to be used. * <p> * Android MediaPlayer / SoundPool is the default and can be used on all * supported Android platform versions (2.2+)<br> * OpenAL Soft uses an OpenSL backend and is only supported on Android * versions 2.3+. * <p> * Only use ANDROID_ static strings found in AppSettings * */ protected String audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; /** * If true Android Sensors are used as simulated Joysticks. Users can use the * Android sensor feedback through the RawInputListener or by registering * JoyAxisTriggers. */ protected boolean joystickEventsEnabled = false; /** * If true MouseEvents are generated from TouchEvents */ protected boolean mouseEventsEnabled = true; /** * Flip X axis */ protected boolean mouseEventsInvertX = false; /** * Flip Y axis */ protected boolean mouseEventsInvertY = false; /** * if true finish this activity when the jme app is stopped */ protected boolean finishOnAppStop = true; /** * set to false if you don't want the harness to handle the exit hook */ protected boolean handleExitHook = true; /** * Title of the exit dialog, default is "Do you want to exit?" 
*/ protected String exitDialogTitle = "Do you want to exit?"; /** * Message of the exit dialog, default is "Use your home key to bring this * app into the background or exit to terminate it." */ protected String exitDialogMessage = "Use your home key to bring this app into the background or exit to terminate it."; /** * Set the screen window mode. If screenFullSize is true, then the * notification bar and title bar are removed and the screen covers the * entire display. If screenFullSize is false, then the notification bar * remains visible if screenShowTitle is true while screenFullScreen is * false, then the title bar is also displayed under the notification bar. */ protected boolean screenFullScreen = true; /** * if screenShowTitle is true while screenFullScreen is false, then the * title bar is also displayed under the notification bar */ protected boolean screenShowTitle = true; /** * Splash Screen picture Resource ID. If a Splash Screen is desired, set * splashPicID to the value of the Resource ID (i.e. R.drawable.picname). If * splashPicID = 0, then no splash screen will be displayed. */ protected int splashPicID = 0; /** * Set the screen orientation, default is SENSOR * ActivityInfo.SCREEN_ORIENTATION_* constants package * android.content.pm.ActivityInfo * * SCREEN_ORIENTATION_UNSPECIFIED SCREEN_ORIENTATION_LANDSCAPE * SCREEN_ORIENTATION_PORTRAIT SCREEN_ORIENTATION_USER * SCREEN_ORIENTATION_BEHIND SCREEN_ORIENTATION_SENSOR (default) * SCREEN_ORIENTATION_NOSENSOR */ protected int screenOrientation = ActivityInfo.SCREEN_ORIENTATION_SENSOR; protected OGLESContext ctx; protected AndroidGLSurfaceView view = null; protected boolean isGLThreadPaused = true; protected ImageView splashImageView = null; protected FrameLayout frameLayout = null; final private String ESCAPE_EVENT = "TouchEscape"; private boolean firstDrawFrame = true; private boolean inConfigChange = false; private class DataObject { protected Application app = null; } @Override public Object onRetainNonConfigurationInstance() { logger.log(Level.FINE, "onRetainNonConfigurationInstance"); final DataObject data = new DataObject(); data.app = this.app; inConfigChange = true; return data; } @Override public void onCreate(Bundle savedInstanceState) { initializeLogHandler(); logger.fine("onCreate"); super.onCreate(savedInstanceState); JmeAndroidSystem.setActivity(this); if (screenFullScreen) { requestWindowFeature(Window.FEATURE_NO_TITLE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); } else { if (!screenShowTitle) { requestWindowFeature(Window.FEATURE_NO_TITLE); } } setRequestedOrientation(screenOrientation); final DataObject data = (DataObject) getLastNonConfigurationInstance(); if (data != null) { logger.log(Level.FINE, "Using Retained App"); this.app = data.app; } else { // Discover the screen reolution //TODO try to find a better way to get a hand on the resolution WindowManager wind = this.getWindowManager(); Display disp = wind.getDefaultDisplay(); Log.d("AndroidHarness", "Resolution from Window, width:" + disp.getWidth() + ", height: " + disp.getHeight()); // Create Settings logger.log(Level.FINE, "Creating settings"); AppSettings settings = new AppSettings(true); settings.setEmulateMouse(mouseEventsEnabled); settings.setEmulateMouseFlipAxis(mouseEventsInvertX, mouseEventsInvertY); settings.setUseJoysticks(joystickEventsEnabled); settings.setSamples(antiAliasingSamples); settings.setResolution(disp.getWidth(), disp.getHeight()); 
settings.put(AndroidConfigChooser.SETTINGS_CONFIG_TYPE, eglConfigType); settings.setAudioRenderer(audioRendererType); // Create application instance try { if (app == null) { @SuppressWarnings("unchecked") Class<? extends Application> clazz = (Class<? extends Application>) Class.forName(appClass); app = clazz.newInstance(); } app.setSettings(settings); app.start(); } catch (Exception ex) { handleError("Class " + appClass + " init failed", ex); setContentView(new TextView(this)); } } ctx = (OGLESContext) app.getContext(); view = ctx.createView(); // AndroidHarness wraps the app as a SystemListener. ctx.setSystemListener(this); layoutDisplay(); } @Override protected void onRestart() { logger.fine("onRestart"); super.onRestart(); if (app != null) { app.restart(); } } @Override protected void onStart() { logger.fine("onStart"); super.onStart(); } @Override protected void onResume() { logger.fine("onResume"); super.onResume(); gainFocus(); } @Override protected void onPause() { logger.fine("onPause"); loseFocus(); super.onPause(); } @Override protected void onStop() { logger.fine("onStop"); super.onStop(); } @Override protected void onDestroy() { logger.fine("onDestroy"); final DataObject data = (DataObject) getLastNonConfigurationInstance(); if (data != null || inConfigChange) { logger.fine("In Config Change, not stopping app."); } else { if (app != null) { app.stop(!isGLThreadPaused); } } setContentView(new TextView(this)); JmeAndroidSystem.setActivity(null); ctx = null; app = null; view = null; super.onDestroy(); } public Application getJmeApplication() { return app; } /** * Called when an error has occurred. By default, will show an error message * to the user and print the exception/error to the log. */ @Override public void handleError(final String errorMsg, final Throwable t) { String stackTrace = ""; String title = "Error"; if (t != null) { // Convert exception to string StringWriter sw = new StringWriter(100); t.printStackTrace(new PrintWriter(sw)); stackTrace = sw.toString(); title = t.toString(); } final String finalTitle = title; final String finalMsg = (errorMsg != null ? 
errorMsg : "Uncaught Exception") + "\n" + stackTrace; logger.log(Level.SEVERE, finalMsg); runOnUiThread(new Runnable() { @Override public void run() { AlertDialog dialog = new AlertDialog.Builder(AndroidHarness.this) // .setIcon(R.drawable.alert_dialog_icon) .setTitle(finalTitle).setPositiveButton("Kill", AndroidHarness.this).setMessage(finalMsg).create(); dialog.show(); } }); } /** * Called by the android alert dialog, terminate the activity and OpenGL * rendering * * @param dialog * @param whichButton */ public void onClick(DialogInterface dialog, int whichButton) { if (whichButton != -2) { if (app != null) { app.stop(true); } app = null; this.finish(); } } /** * Gets called by the InputManager on all touch/drag/scale events */ @Override public void onTouch(String name, TouchEvent evt, float tpf) { if (name.equals(ESCAPE_EVENT)) { switch (evt.getType()) { case KEY_UP: runOnUiThread(new Runnable() { @Override public void run() { AlertDialog dialog = new AlertDialog.Builder(AndroidHarness.this) // .setIcon(R.drawable.alert_dialog_icon) .setTitle(exitDialogTitle).setPositiveButton("Yes", AndroidHarness.this).setNegativeButton("No", AndroidHarness.this).setMessage(exitDialogMessage).create(); dialog.show(); } }); break; default: break; } } } public void layoutDisplay() { logger.log(Level.FINE, "Splash Screen Picture Resource ID: {0}", splashPicID); if (view == null) { logger.log(Level.FINE, "view is null!"); } if (splashPicID != 0) { FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams( LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT, Gravity.CENTER); frameLayout = new FrameLayout(this); splashImageView = new ImageView(this); Drawable drawable = this.getResources().getDrawable(splashPicID); if (drawable instanceof NinePatchDrawable) { splashImageView.setBackgroundDrawable(drawable); } else { splashImageView.setImageResource(splashPicID); } if (view.getParent() != null) { ((ViewGroup) view.getParent()).removeView(view); } frameLayout.addView(view); if (splashImageView.getParent() != null) { ((ViewGroup) splashImageView.getParent()).removeView(splashImageView); } frameLayout.addView(splashImageView, lp); setContentView(frameLayout); logger.log(Level.FINE, "Splash Screen Created"); } else { logger.log(Level.FINE, "Splash Screen Skipped."); setContentView(view); } } public void removeSplashScreen() { logger.log(Level.FINE, "Splash Screen Picture Resource ID: {0}", splashPicID); if (splashPicID != 0) { if (frameLayout != null) { if (splashImageView != null) { this.runOnUiThread(new Runnable() { @Override public void run() { splashImageView.setVisibility(View.INVISIBLE); frameLayout.removeView(splashImageView); } }); } else { logger.log(Level.FINE, "splashImageView is null"); } } else { logger.log(Level.FINE, "frameLayout is null"); } } } /** * Removes the standard Android log handler due to an issue with not logging * entries lower than INFO level and adds a handler that produces * JME formatted log messages. 
*/ protected void initializeLogHandler() { Logger log = LogManager.getLogManager().getLogger(""); for (Handler handler : log.getHandlers()) { if (log.getLevel() != null && log.getLevel().intValue() <= Level.FINE.intValue()) { Log.v("AndroidHarness", "Removing Handler class: " + handler.getClass().getName()); } log.removeHandler(handler); } Handler handler = new AndroidLogHandler(); log.addHandler(handler); handler.setLevel(Level.ALL); } public void initialize() { app.initialize(); if (handleExitHook) { app.getInputManager().addMapping(ESCAPE_EVENT, new TouchTrigger(TouchInput.KEYCODE_BACK)); app.getInputManager().addListener(this, new String[]{ESCAPE_EVENT}); } } public void reshape(int width, int height) { app.reshape(width, height); } public void update() { app.update(); // call to remove the splash screen, if present. // call after app.update() to make sure no gap between // splash screen going away and app display being shown. if (firstDrawFrame) { removeSplashScreen(); firstDrawFrame = false; } } public void requestClose(boolean esc) { app.requestClose(esc); } public void destroy() { if (app != null) { app.destroy(); } if (finishOnAppStop) { finish(); } } public void gainFocus() { logger.fine("gainFocus"); if (view != null) { view.onResume(); } if (app != null) { //resume the audio AudioRenderer result = app.getAudioRenderer(); if (result != null) { if (result instanceof AndroidAudioRenderer) { AndroidAudioRenderer renderer = (AndroidAudioRenderer) result; renderer.resumeAll(); } } //resume the sensors (aka joysticks) if (app.getContext() != null) { JoyInput joyInput = app.getContext().getJoyInput(); if (joyInput != null) { if (joyInput instanceof AndroidSensorJoyInput) { AndroidSensorJoyInput androidJoyInput = (AndroidSensorJoyInput) joyInput; androidJoyInput.resumeSensors(); } } } } isGLThreadPaused = false; if (app != null) { app.gainFocus(); } } public void loseFocus() { logger.fine("loseFocus"); if (app != null) { app.loseFocus(); } if (view != null) { view.onPause(); } if (app != null) { //pause the audio AudioRenderer result = app.getAudioRenderer(); if (result != null) { logger.log(Level.FINE, "pause: {0}", result.getClass().getSimpleName()); if (result instanceof AndroidAudioRenderer) { AndroidAudioRenderer renderer = (AndroidAudioRenderer) result; renderer.pauseAll(); } } //pause the sensors (aka joysticks) if (app.getContext() != null) { JoyInput joyInput = app.getContext().getJoyInput(); if (joyInput != null) { if (joyInput instanceof AndroidSensorJoyInput) { AndroidSensorJoyInput androidJoyInput = (AndroidSensorJoyInput) joyInput; androidJoyInput.pauseSensors(); } } } } isGLThreadPaused = true; } }
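// A minimal configuration sketch: a concrete Activity pointing the harness at a jME
// Application subclass. "com.example.MyGame" is a placeholder class name; the other
// values simply override the protected defaults documented above.
class MyGameActivity extends AndroidHarness {
    public MyGameActivity() {
        appClass = "com.example.MyGame";   // fully qualified name of the Application to start
        eglConfigType = ConfigType.BEST;   // request RGBA8888 when the hardware supports it
        screenFullScreen = true;           // hide the notification and title bars
        screenOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
        splashPicID = 0;                   // no splash screen resource
    }
}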
package org.cytoscape.io.internal.write.sif; import static org.junit.Assert.*; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.OutputStream; import java.util.ArrayList; import java.util.List; import org.cytoscape.ding.NetworkViewTestSupport; import org.cytoscape.model.CyEdge; import org.cytoscape.model.CyNetwork; import org.cytoscape.model.CyNode; import org.junit.After; import org.junit.Before; import org.junit.Test; // TODO: Add more complicated test cases. public class SifWriterTest { private NetworkViewTestSupport support = new NetworkViewTestSupport(); private CyNetwork network1; @Before public void setUp() throws Exception { network1 = support.getNetwork(); CyNode n1 = network1.addNode(); CyNode n2 = network1.addNode(); CyNode n3 = network1.addNode(); // Not connected CyNode n4 = network1.addNode(); CyEdge e1 = network1.addEdge(n1, n2, true); CyEdge e2 = network1.addEdge(n2, n3, true); CyEdge e3 = network1.addEdge(n1, n3, true); CyEdge e1self = network1.addEdge(n1, n1, true); network1.getRow(n1).set(CyNetwork.NAME, "n1"); network1.getRow(n2).set(CyNetwork.NAME, "n2 "); network1.getRow(n3).set(CyNetwork.NAME, "n3"); network1.getRow(n4).set(CyNetwork.NAME, "Alone"); network1.getRow(e1).set(CyNetwork.NAME, "e1"); network1.getRow(e2).set(CyNetwork.NAME, "e2"); network1.getRow(e3).set(CyNetwork.NAME, "e3"); network1.getRow(e1self).set(CyNetwork.NAME, "e1self"); } @After public void tearDown() throws Exception { } @Test public void testSifWriter() throws Exception { assertNotNull(network1); File temp = File.createTempFile("network1", ".sif"); temp.deleteOnExit(); OutputStream os = new FileOutputStream(temp); SifWriter writer = new SifWriter(os, network1); writer.run(null); os.close(); // Read contents System.out.println("Temp = " + temp.getAbsolutePath()); BufferedReader reader = new BufferedReader(new FileReader(temp)); String line = null; final List<String> lines = new ArrayList<String>(); while((line = reader.readLine()) != null) { lines.add(line); System.out.println("Line = " + line); } // 4 edges plus the unconnected node give 5 output lines in total assertEquals(5, lines.size()); assertTrue(lines.contains("Alone")); assertTrue(lines.contains("n1\t-\tn1")); reader.close(); } }
package org.minimalj.frontend.page; import java.util.List; import org.minimalj.frontend.Frontend.IContent; import org.minimalj.frontend.action.Action; import org.minimalj.security.AccessControl; import org.minimalj.security.Authorization; import org.minimalj.security.Subject; import org.minimalj.util.StringUtils; import org.minimalj.util.resources.Resources; /** * Pages are one of the building blocks of an application. They are intended to fill the whole space * of a window (or simply the display space if the frontend has no windows). Their content is static * in contrast to dialogs, which are meant to allow inputs.<p> * * A page can have a title and content. Pages are meant to be ContentProviders. Pages are * allocated a lot. They should be lightweight! The heavy work should be done when calling * getContent(). * */ public abstract class Page implements AccessControl { public String getTitle() { return Resources.getString(getClass()); } public abstract IContent getContent(); /** * * @return List of Actions specific to this page and its content. These * Actions can be displayed by the frontend as a context menu or at * the right side of the page. Actions can be enabled or disabled * but not added or removed. Actions can be grouped with an * ActionGroup. */ public List<Action> getActions() { return null; } private static final String ALLOWED_CHARS = "-._~:/?#[]@!$&'()*+,;=%"; // #% additional to URL Fragment /** * Route String must obey some rules to be valid: * <UL> * <LI>start with a '/'</LI> * <LI>no '/' at end</LI> * <LI>contain no '..'</LI> * <LI>all characters must be letters, digits or in ALLOWED_CHARS</LI> * </UL> * * @param route String provided by a page * @return whether the Frontend will accept the route or not * @see java.util.Base64#getUrlEncoder */ public static boolean validateRoute(String route) { if (StringUtils.isEmpty(route)) { return false; } if (route.charAt(0) != '/' || route.length() > 1 && route.endsWith("/")) { return false; } if (route.contains("..")) { return false; } for (int i = 1; i < route.length(); i++) { char c = route.charAt(i); if (!(c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c >= '0' && c <= '9' || ALLOWED_CHARS.indexOf(c) >= 0)) { return false; } } return true; } /** * This default implementation handles access by annotations. This can be * overridden to grant access not only by the page class but also by the data * the page will show. * * @param subject the current subject * @return true if the current subject can access this page. */ @Override public boolean hasAccess(Subject subject) { return !Boolean.FALSE.equals(Authorization.hasAccessByAnnotation(subject, getClass())); } }
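// A small sketch illustrating the route rules documented on validateRoute above;
// the route strings are arbitrary examples.
class RouteValidationExample {
    static void check() {
        assert Page.validateRoute("/customers/42");    // leading '/', allowed characters only
        assert !Page.validateRoute("customers/42");    // rejected: missing leading '/'
        assert !Page.validateRoute("/customers/42/");  // rejected: trailing '/'
        assert !Page.validateRoute("/a/../b");         // rejected: contains ".."
    }
}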
package nars.regulation.twopoint; import java.awt.Color; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.List; import java.util.Random; import javax.swing.JPanel; import nars.core.Memory; import nars.core.NAR; import nars.core.Parameters; import nars.core.build.Default; import nars.entity.Task; import nars.gui.NARSwing; import nars.language.Term; import nars.operator.Operation; import nars.operator.Operator; /** * * @author patrick.hammer */ public class drawPanel extends JPanel { int inc=0; int lastinc=0; public class move extends Operator { public move() { super("^move"); } @Override protected List<Task> execute(Operation operation, Term[] args, Memory memory) { if(args.length==2) { //left, self inc++; if(args[0].toString().equals("left")) { x-=10; if(x>setpoint) { nar.addInput("<SELF --> [good]>. :|: %1.00;0.90%"); } else { nar.addInput("<SELF --> [good]>. :|: %0.00;0.90%"); } } if(args[0].toString().equals("right")) { x+=10; if(x>setpoint) { nar.addInput("<SELF --> [good]>. :|: %0.00;0.90%"); } else { nar.addInput("<SELF --> [good]>. :|: %1.00;0.90%"); } } } return null; } } NAR nar; public drawPanel() { Parameters.CURIOSITY_ALSO_ON_LOW_CONFIDENT_HIGH_PRIORITY_BELIEF=false; nar=new Default().build(); nar.addPlugin(new move()); //new NARSwing(nar); nar.addInput("<SELF --> [good]>!"); new javax.swing.Timer(30, new ActionListener(){ @Override public void actionPerformed(ActionEvent e) { repaint(); } }).start(); } int setpoint=80; int x=160; int y=10; int k=0; private void doDrawing(Graphics g) { int modu=10; boolean cond = (inc!=lastinc); lastinc=inc; if(k<1) { //nar.addInput("move(left). :|: %0.00;0.99%"); // nar.addInput("move(right). :|: %0.00;0.99%"); nar.addInput("move(left)! :|:"); nar.addInput("move(right)! :|:"); } if((cond || k%50==0) && x==setpoint) { nar.addInput("<SELF --> [good]>. :|: %1.00;0.90%"); } if(cond) { System.out.println(x); if(cond) { nar.addInput("<SELF --> [good]>! :|:"); } if(x>setpoint) { nar.addInput("<target --> left>. :|:"); //nar.addInput("move(left)! :|:"); } if(x<setpoint) { nar.addInput("<target --> right>. :|:"); // nar.addInput("move(right)! :|:"); } } k++; nar.step(100); Graphics2D g2d = (Graphics2D) g; g2d.setColor(Color.blue); g2d.fillOval(x, y, 10, 10); g2d.setColor(Color.red); g2d.fillOval(setpoint, y, 10, 10); /*for (int i = 0; i <= 1000; i++) { Dimension size = getSize(); Insets insets = getInsets(); int w = size.width - insets.left - insets.right; int h = size.height - insets.top - insets.bottom; Random r = new Random(); int x = Math.abs(r.nextInt()) % w; int y = Math.abs(r.nextInt()) % h; g2d.drawLine(x, y, x, y); }*/ } @Override public void paintComponent(Graphics g) { super.paintComponent(g); doDrawing(g); } }
package jacobi.core.decomp.qr; import jacobi.core.decomp.qr.step.QRStep; import jacobi.api.Matrix; import jacobi.core.decomp.qr.step.SingleStep2x2; import jacobi.core.util.Throw; /** * Basic QR algorithm implementation with a given iteration implementation. * * The basic QR algorithm proceeds as follows: * * Given a Hessenberg matrix A, find f(A) with some shifting strategy * Find Q*R = f(A), s.t. Q is orthogonal and R upper triangular * Compute A' = R*Q, and ~A = f^-1(A') * Repeat until ~A is upper triangular. * * When a sub-diagonal entry of A is close to zero, A can be deflated into * two separate matrices and the iteration performed on each part in isolation. Depending on * whether the whole Schur form is required or only the eigenvalues, computation on * columns beyond the upper left corner may be unnecessary. * * This class performs the iteration given an implementation of a single step. * * @author Y.K. Chan */ public class BasicQR implements QRStrategy { /** * Constructor. * @param step Implementation to perform an iteration. */ public BasicQR(QRStep step) { this.step = new SingleStep2x2(step); } @Override public Matrix compute(Matrix matrix, Matrix partner, boolean fullUpper) { Throw.when() .isNull(() -> matrix, () -> "No matrix to compute.") .isTrue( () -> matrix.getRowCount() != matrix.getColCount(), () -> "Unable to compute a non-square " + matrix.getRowCount() + "x" + matrix.getColCount() + " matrix.") .isTrue( () -> partner != null && matrix.getRowCount() != partner.getRowCount(), () -> "Mismatch partner matrix having " + partner.getRowCount() + " rows."); this.compute(matrix, partner, 0, matrix.getRowCount(), fullUpper); return matrix; } /** * Iterate using the QR step within the given range until it converges or the iteration limit is exhausted. * @param matrix Input matrix A * @param partner Partner matrix * @param beginRow Begin index of row of interest * @param endRow End index of row of interest * @param fullUpper True if the full upper triangular matrix is required, false otherwise */ protected void compute(Matrix matrix, Matrix partner, int beginRow, int endRow, boolean fullUpper) { if(endRow - beginRow < 2){ return; } if(endRow - beginRow == 2){ this.step.compute(matrix, partner, beginRow, endRow, fullUpper); return; } int limit = LIMIT * (endRow - beginRow); int end = this.deflate(matrix, beginRow, endRow); for(int k = 0; k < limit; k++){ this.step.compute(matrix, partner, beginRow, end, fullUpper); int conv = this.getConverged(matrix, beginRow, endRow); if (conv >= 0) { this.compute(matrix, partner, beginRow, conv, fullUpper); this.compute(matrix, partner, conv, endRow, fullUpper); return; } } throw new UnsupportedOperationException("Exhausted " + limit + " iterations without converging any entry."); } /** * Find the index of the first zero entry in the sub-diagonal. * @param matrix Input matrix * @param begin Begin index of row of interest * @param end End index of row of interest * @return Index of first zero entry, -1 if none was found. */ protected int getConverged(Matrix matrix, int begin, int end) { for(int i = end - 1; i > begin; i--){ if(Math.abs(matrix.get(i, i - 1)) < EPSILON){ return i; } } return -1; } /** * Find the index after the last non-zero entry in the sub-diagonal. * @param matrix Input matrix * @param begin Begin index of row of interest * @param end End index of row of interest * @return Index just after the last non-zero sub-diagonal entry; begin + 1 if all entries in range are zero.
*/ protected int deflate(Matrix matrix, int begin, int end) { int k = end - 1; while(k > begin){ if(Math.abs(matrix.get(k, k - 1)) > EPSILON){ break; } k--; } return k + 1; } private QRStep step; private static final double EPSILON = 1e-12; private static final int LIMIT = 128; }
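A self-contained sketch of the sub-diagonal scan that getConverged performs above, written against a plain double[][] instead of the Jacobi Matrix interface (the threshold and test matrix are illustrative):

// A negligible sub-diagonal entry |a[i][i-1]| lets the QR iteration split the
// problem at row i; this mirrors getConverged(...) on a plain array.
public class DeflationSketch {
    static final double EPSILON = 1e-12;

    // Index of the first negligible sub-diagonal entry in (begin, end), or -1 if none.
    static int getConverged(double[][] a, int begin, int end) {
        for (int i = end - 1; i > begin; i--) {
            if (Math.abs(a[i][i - 1]) < EPSILON) {
                return i;
            }
        }
        return -1;
    }

    public static void main(String[] args) {
        double[][] a = {
            {4.0, 1.0, 0.0},
            {2.0, 3.0, 1.0},
            {0.0, 1e-15, 5.0} // negligible sub-diagonal entry: split at row 2
        };
        System.out.println(getConverged(a, 0, 3)); // prints 2
    }
}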
package org.psjava.javautil; public class Java1DArray { @SuppressWarnings("unchecked") public static <T> T[] create(Class<?> clazz, int n) { return (T[]) java.lang.reflect.Array.newInstance(clazz, n); } }
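A minimal usage example for the reflective array factory above; it assumes Java1DArray is on the classpath:

import org.psjava.javautil.Java1DArray;

// Creates a typed array without resorting to an unchecked new T[n].
public class Java1DArrayExample {
    public static void main(String[] args) {
        String[] names = Java1DArray.create(String.class, 3);
        names[0] = "a";
        System.out.println(names.length + " " + names[0]); // 3 a
    }
}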
package org.selfip.bkimmel.util; /** * Utility methods for working with strings. * @author brad */ public final class StringUtil { /** The hexadecimal digits. */ private static final char[] hexDigits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; /** * Converts the specified byte value to a two digit hex string. * @param b The <code>byte</code> value to convert to a string. * @return The two digit hexadecimal representation of <code>b</code>. */ public static String toHex(byte b) { final char[] string = { hexDigits[(b >> 4) & 0x0f], hexDigits[b & 0x0f] }; return new String(string); } /** * Converts the specified array of bytes to a hex string. * @param bytes The array of bytes to convert to a string. * @return The hexadecimal representation of <code>bytes</code>. */ public static String toHex(byte[] bytes) { final char[] string = new char[2 * bytes.length]; int i = 0; for (byte b : bytes) { string[i++] = hexDigits[(b >> 4) & 0x0f]; string[i++] = hexDigits[b & 0x0f]; } return new String(string); } /** * Converts a number expressed in hexadecimal to a byte array. * @param hex The <code>String</code> representation of the number, in * hexadecimal. * @return The byte array represented by <code>hex</code>. */ public static byte[] hexToByteArray(String hex) { int length = hex.length(); byte[] result = new byte[(length / 2) + (length % 2)]; for (int i = length, j = result.length - 1; i > 0; i -= 2, j--) { result[j] = hexToByte(hex.charAt(i - 1)); if (i > 1) { result[j] |= (hexToByte(hex.charAt(i - 2)) << 4); } } return result; } /** * Converts a hexadecimal digit to a byte. * @param hex The hexadecimal digit. * @return The byte value corresponding to <code>hex</code>. */ public static byte hexToByte(char hex) { if ('0' <= hex && hex <= '9') { return (byte) (hex - '0'); } else if ('A' <= hex && hex <= 'F') { return (byte) (10 + hex - 'A'); } else if ('a' <= hex && hex <= 'f') { return (byte) (10 + hex - 'a'); } else { throw new IllegalArgumentException(String.format("'%c' is not a hexadecimal digit.", hex)); } } /** Declared private to prevent this class from being instantiated. */ private StringUtil() {} }
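A quick round-trip check for the helpers above (assumes StringUtil is on the classpath); bytes converted to hex and back should compare equal:

import java.util.Arrays;
import org.selfip.bkimmel.util.StringUtil;

// bytes -> hex string -> bytes should reproduce the original array.
public class StringUtilExample {
    public static void main(String[] args) {
        byte[] original = { 0x0a, 0x1b, (byte) 0xff };
        String hex = StringUtil.toHex(original);            // "0a1bff"
        byte[] roundTrip = StringUtil.hexToByteArray(hex);
        System.out.println(hex + " " + Arrays.equals(original, roundTrip)); // 0a1bff true
    }
}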
package org.recap.route; import org.recap.model.etl.BibPersisterCallable; import org.recap.model.etl.LoadReportEntity; import org.recap.model.jaxb.BibRecord; import org.recap.model.jaxb.JAXBHandler; import org.recap.model.jpa.*; import org.recap.repository.BibliographicDetailsRepository; import org.recap.repository.CollectionGroupDetailsRepository; import org.recap.repository.InstitutionDetailsRepository; import org.recap.repository.ItemStatusDetailsRepository; import org.recap.util.CsvUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.stereotype.Component; import org.springframework.util.CollectionUtils; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; @Component public class RecordProcessor { private Logger logger = LoggerFactory.getLogger(RecordProcessor.class); private Map institutionEntityMap; private Map itemStatusMap; private Map collectionGroupMap; private JAXBHandler jaxbHandler; @Autowired private BibliographicDetailsRepository bibliographicDetailsRepository; @Autowired private InstitutionDetailsRepository institutionDetailsRepository; @Autowired private ItemStatusDetailsRepository itemStatusDetailsRepository; @Autowired private CollectionGroupDetailsRepository collectionGroupDetailsRepository; @Autowired CsvUtil csvUtil; @Autowired BibDataProcessor bibDataProcessor; public void process(Page<XmlRecordEntity> xmlRecordEntities) { logger.info("Processor: " + Thread.currentThread().getName()); List<Future> futures = new ArrayList<>(); List<BibliographicEntity> bibliographicEntities = new ArrayList<>(); List<LoadReportEntity> loadReportEntities = new ArrayList<>(); BibRecord bibRecord = null; ExecutorService executorService = Executors.newFixedThreadPool(50); for (Iterator<XmlRecordEntity> iterator = xmlRecordEntities.iterator(); iterator.hasNext(); ) { XmlRecordEntity xmlRecordEntity = iterator.next(); String xml = xmlRecordEntity.getXml(); bibRecord = (BibRecord) getJaxbHandler().unmarshal(xml, BibRecord.class); Future submit = executorService.submit(new BibPersisterCallable(bibRecord, getInstitutionEntityMap(), getItemStatusMap(), getCollectionGroupMap())); if (null != submit) { futures.add(submit); } } for (Iterator<Future> iterator = futures.iterator(); iterator.hasNext(); ) { Future future = iterator.next(); Object object = null; try { object = future.get(); } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } Map<String, Object> map = (Map<String, Object>) object; if (object != null) { Object bibliographicEntity = map.get("bibliographicEntity"); Object loadReportEntity = map.get("loadReportEntity"); if (bibliographicEntity != null) { bibliographicEntities.add((BibliographicEntity) bibliographicEntity); } else if (loadReportEntity != null) { loadReportEntities.add((LoadReportEntity) loadReportEntity); } } } if (!CollectionUtils.isEmpty(bibliographicEntities)) { ETLExchange etlExchange = new ETLExchange(); etlExchange.setBibliographicEntities(bibliographicEntities); etlExchange.setInstitutionEntityMap(getInstitutionEntityMap()); etlExchange.setCollectionGroupMap(getCollectionGroupMap()); bibDataProcessor.processMessage(etlExchange); } if (!CollectionUtils.isEmpty(loadReportEntities)) { csvUtil.writeLoadReportToCsv(loadReportEntities); } 
executorService.shutdown(); } public JAXBHandler getJaxbHandler() { if (null == jaxbHandler) { jaxbHandler = JAXBHandler.getInstance(); } return jaxbHandler; } public Map getInstitutionEntityMap() { if (null == institutionEntityMap) { institutionEntityMap = new HashMap(); Iterable<InstitutionEntity> institutionEntities = institutionDetailsRepository.findAll(); for (Iterator<InstitutionEntity> iterator = institutionEntities.iterator(); iterator.hasNext(); ) { InstitutionEntity institutionEntity = iterator.next(); institutionEntityMap.put(institutionEntity.getInstitutionCode(), institutionEntity.getInstitutionId()); } } return institutionEntityMap; } public Map getItemStatusMap() { if (null == itemStatusMap) { itemStatusMap = new HashMap(); Iterable<ItemStatusEntity> itemStatusEntities = itemStatusDetailsRepository.findAll(); for (Iterator<ItemStatusEntity> iterator = itemStatusEntities.iterator(); iterator.hasNext(); ) { ItemStatusEntity itemStatusEntity = iterator.next(); itemStatusMap.put(itemStatusEntity.getStatusCode(), itemStatusEntity.getItemStatusId()); } } return itemStatusMap; } public Map getCollectionGroupMap() { if (null == collectionGroupMap) { collectionGroupMap = new HashMap(); Iterable<CollectionGroupEntity> collectionGroupEntities = collectionGroupDetailsRepository.findAll(); for (Iterator<CollectionGroupEntity> iterator = collectionGroupEntities.iterator(); iterator.hasNext(); ) { CollectionGroupEntity collectionGroupEntity = iterator.next(); collectionGroupMap.put(collectionGroupEntity.getCollectionGroupCode(), collectionGroupEntity.getCollectionGroupId()); } } return collectionGroupMap; } }
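The processor above fans each XML record out to a thread pool and then drains the futures in submission order; below is a generic, self-contained sketch of that submit-then-collect pattern (the task type and pool size are illustrative, not the RecordProcessor values):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Submit one Callable per input, then block on each Future in submission order.
public class SubmitCollectSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        List<Future<Integer>> futures = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            final int n = i;
            futures.add(pool.submit((Callable<Integer>) () -> n * n));
        }
        int sum = 0;
        for (Future<Integer> f : futures) {
            sum += f.get(); // blocks until this particular task has finished
        }
        pool.shutdown();
        System.out.println(sum); // 285
    }
}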
package org.scijava.io; import java.io.IOException; import org.scijava.event.EventService; import org.scijava.io.event.DataOpenedEvent; import org.scijava.io.event.DataSavedEvent; import org.scijava.log.LogService; import org.scijava.plugin.AbstractHandlerService; import org.scijava.plugin.Parameter; import org.scijava.plugin.Plugin; import org.scijava.service.Service; /** * Default implementation of {@link IOService}. * * @author Curtis Rueden */ @Plugin(type = Service.class) public final class DefaultIOService extends AbstractHandlerService<String, IOPlugin<?>> implements IOService { @Parameter private LogService log; @Parameter private EventService eventService; // -- IOService methods -- @Override public IOPlugin<?> getOpener(final String source) { for (final IOPlugin<?> handler : getInstances()) { if (handler.supportsOpen(source)) return handler; } return null; } @Override public <D> IOPlugin<D> getSaver(final D data, final String destination) { for (final IOPlugin<?> handler : getInstances()) { if (handler.supportsSave(data, destination)) { @SuppressWarnings("unchecked") final IOPlugin<D> typedHandler = (IOPlugin<D>) handler; return typedHandler; } } return null; } @Override public Object open(final String source) throws IOException { IOPlugin<?> opener = getOpener(source); Object data = null; if (opener != null) { data = opener.open(source); } if (data != null) { eventService.publish(new DataOpenedEvent(source, data)); } return data; } @Override public void save(final Object data, final String destination) throws IOException { IOPlugin<Object> saver = getSaver(data, destination); if (saver != null) { saver.save(data, destination); eventService.publish(new DataSavedEvent(destination, data)); } } // -- HandlerService methods -- @Override @SuppressWarnings({ "rawtypes", "unchecked" }) public Class<IOPlugin<?>> getPluginType() { return (Class) IOPlugin.class; } @Override public Class<String> getType() { return String.class; } }
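getOpener and getSaver above are a plain first-match scan over the registered handlers; the following self-contained sketch shows the same pattern with hypothetical interfaces (this is not the SciJava IOPlugin API):

import java.util.Arrays;
import java.util.List;

// First-match handler lookup, analogous to getOpener(...) above.
public class HandlerLookupSketch {
    interface Handler {
        boolean supports(String source);
        String open(String source);
    }

    static Handler find(List<Handler> handlers, String source) {
        for (Handler h : handlers) {
            if (h.supports(source)) {
                return h; // first handler that claims support wins
            }
        }
        return null; // no handler found
    }

    public static void main(String[] args) {
        Handler text = new Handler() {
            public boolean supports(String s) { return s.endsWith(".txt"); }
            public String open(String s) { return "text:" + s; }
        };
        Handler csv = new Handler() {
            public boolean supports(String s) { return s.endsWith(".csv"); }
            public String open(String s) { return "table:" + s; }
        };
        Handler h = find(Arrays.asList(text, csv), "data.csv");
        System.out.println(h == null ? "unsupported" : h.open("data.csv")); // table:data.csv
    }
}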
package org.usfirst.frc.team2503; public class Constants { public static final int leftTalonPort = 0; public static final int rightTalonPort = 1; public static final int winchTalonPort = 2; public static final int upperLightsRelayPort = 0; public static final int underGlowLightsRelayPort = 1; public static int compressorPort = 0; public static final int winchLowerLimitSwitchChannel = 0; public static final int winchUpperLimitSwitchChannel = 1; public static final int driveBaseLeftSolenoidChannel = 1; public static final int driveBaseRightSolenoidChannel = 0; public static final double inputIndicationNullZone = 0.125; public static final double drivePrecisionMultiplier = 0.3; public static final double masterPowerMultiplier = 1.0; public static final String piBaseUrl = "http://192.168.1.103:5800"; public static final String piWebUrl = piBaseUrl + "/web"; public static final String piVisionUrl = piBaseUrl + "/vision"; public static final String piStatusUrl = piBaseUrl + "/status?k=robot"; public static final String piClientVersion = "0.0.0"; public static final boolean epilepsyMode = true; public static final boolean PERMISSION_PNEUMATICS_CONTROL = true; }
package main.design.pattern.singleton; /** * Title : main.design.pattern.singleton <br> * Description : <br> * * @author main.chile * @version 1.0 * @date 2017/3/31 10:28 */ public class TaskManager { private static TaskManager tm = null; private TaskManager() {} /** * 1 * get singleton instance * lazy initialization without synchronization; not thread-safe * @return tm */ public static TaskManager getInstance() { if (tm == null) { tm = new TaskManager(); } return tm; } /** * 2 * get singleton instance * synchronized method; thread-safe but takes the lock on every call * @return tm */ public static synchronized TaskManager getInstanceSynchronizedMethod() { if (tm == null) { tm = new TaskManager(); } return tm; } /** * 3 * get singleton instance with double-checked locking * better than {@link #getInstanceSynchronizedMethod()} because the lock is only taken before initialization * @return tm */ public static TaskManager getInstanceSynchronized() { if (null == tm) { synchronized (TaskManager.class) { if (null == tm) { tm = new TaskManager(); } } } return tm; } private static class InstanceHolder { private static final TaskManager INSTANCE = new TaskManager(); } /** * 4 * get singleton instance with an inner static holder class * @return tm */ public static TaskManager getInstanceWithInnerStaticClass() { return InstanceHolder.INSTANCE; } public void displayProcesses() {} public void displayServices() {} }
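One caveat on variant 3 above: under the Java memory model, double-checked locking is only safe when the instance field is volatile; without it another thread may observe a partially constructed object. A minimal sketch of the volatile form:

// Double-checked locking with a volatile field (memory-safe since Java 5).
public class SafeLazySingleton {
    private static volatile SafeLazySingleton instance;

    private SafeLazySingleton() {}

    public static SafeLazySingleton getInstance() {
        if (instance == null) {                      // first check, no lock taken
            synchronized (SafeLazySingleton.class) {
                if (instance == null) {              // second check, under the lock
                    instance = new SafeLazySingleton();
                }
            }
        }
        return instance;
    }
}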
package deploy; import actuator.GRTDoubleActuator; import actuator.GRTSolenoid; import controller.DriveController; import controller.MechController; import core.GRTConstants; import edu.wpi.first.wpilibj.Compressor; import edu.wpi.first.wpilibj.SpeedController; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.Victor; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import logger.GRTLogger; import mechanism.Belts; import mechanism.Climber; import mechanism.GRTDriveTrain; import mechanism.PickerUpper; import mechanism.Shooter; import sensor.ButtonBoard; import sensor.GRTBatterySensor; import sensor.GRTEncoder; import sensor.GRTJoystick; import sensor.GRTSwitch; /** * Constructor for the main robot. Put all robot components here. * * @author ajc */ public class MainRobot extends GRTRobot { private GRTDriveTrain dt; /** * Initializer for the robot. Calls an appropriate initialization function. */ public MainRobot() { switch ((int) GRTConstants.getValue("robot")) { case 2012: GRTLogger.logInfo("Starting up 2012 Test Base"); base2012Init(); break; case 2013: GRTLogger.logInfo("Starting up 2013 Test Base"); base2013Init(); break; case -1: GRTLogger.logInfo("Starting up Beta Bot"); betaInit(); } } public void disabled() { GRTLogger.logInfo("Disabling robot. Halting drivetrain"); dt.setMotorSpeeds(0.0, 0.0); } /** * Initializer for beta bot. */ private void betaInit() { GRTJoystick leftPrimary = new GRTJoystick(1, 12, "left primary joy"); GRTJoystick rightPrimary = new GRTJoystick(2, 12, "right primary joy"); GRTJoystick secondary = new GRTJoystick(3, 12, "secondary joy"); leftPrimary.enable(); rightPrimary.enable(); secondary.enable(); leftPrimary.startPolling(); rightPrimary.startPolling(); secondary.startPolling(); GRTLogger.logInfo("Joysticks initialized"); //Battery Sensor GRTBatterySensor batterySensor = new GRTBatterySensor(10, "battery"); batterySensor.startPolling(); batterySensor.enable(); //Shifter solenoids GRTSolenoid leftShifter = new GRTSolenoid((int) GRTConstants.getValue("leftSolenoid")); GRTSolenoid rightShifter = new GRTSolenoid((int) GRTConstants.getValue("rightSolenoid")); // PWM outputs //TODO check motor pins Talon leftDT1 = new Talon((int) GRTConstants.getValue("leftDT1")); Talon leftDT2 = new Talon((int) GRTConstants.getValue("leftDT2")); Talon rightDT1 = new Talon((int) GRTConstants.getValue("rightDT1")); Talon rightDT2 = new Talon((int) GRTConstants.getValue("rightDT2")); GRTLogger.logInfo("Motors initialized"); //Mechanisms dt = new GRTDriveTrain(leftDT1, leftDT2, rightDT1, rightDT2, leftShifter, rightShifter); //TODO Encoders dt.setScaleFactors(1, -1, -1, 1); DriveController dc = new DriveController(dt, leftPrimary, rightPrimary); addTeleopController(dc); //Compressor Compressor compressor = new Compressor((int) GRTConstants.getValue("compressor"), 1); compressor.start(); GRTDoubleActuator doubleSolenoid = new GRTDoubleActuator((int) GRTConstants.getValue("doubleSolenoidPin")); //shooter actuators Talon shooter1 = new Talon((int) GRTConstants.getValue("shooter1")); Talon shooter2 = new Talon((int) GRTConstants.getValue("shooter2")); GRTSolenoid shooterFeeder = new GRTSolenoid((int) GRTConstants.getValue("shooterFeeder")); GRTSolenoid shooterRaiser1 = new GRTSolenoid((int) GRTConstants.getValue("shooterRaiser1")); GRTSolenoid shooterRaiser2 = new GRTSolenoid((int) GRTConstants.getValue("shooterRaiser2")); GRTSolenoid shooterHoldDown = doubleSolenoid.getFirstSolenoid(); Shooter shooter = new Shooter(shooter1, 
shooter2, shooterFeeder, shooterRaiser1, shooterRaiser2, shooterHoldDown); //Belts actuators Victor beltsMotor = new Victor((int) GRTConstants.getValue("belts")); GRTSolenoid fingerSolenoid = new GRTSolenoid((int) GRTConstants.getValue("fingerSolenoid")); Belts belts = new Belts(beltsMotor, fingerSolenoid); //PickerUpper SpeedController rollerMotor = new Victor((int) GRTConstants.getValue("rollerMotor")); SpeedController raiserMotor = new Victor((int) GRTConstants.getValue("raiserMotor")); GRTSwitch limitUp = new GRTSwitch((int) GRTConstants.getValue("pickUpUpperLimit"), 50, false, "limitUp"); GRTSwitch limitDown = new GRTSwitch((int) GRTConstants.getValue("pickUpLowerLimit"), 50, false, "limitDown"); PickerUpper youTiao = new PickerUpper(rollerMotor, raiserMotor, limitUp, limitDown); //Climber GRTSolenoid solenoid1 = new GRTSolenoid((int) GRTConstants.getValue("climberSolenoid1")); GRTSolenoid solenoid2 = new GRTSolenoid((int) GRTConstants.getValue("climberSolenoid2")); GRTSolenoid engager = doubleSolenoid.getSecondSolenoid(); Climber climber = new Climber(dt, solenoid1, solenoid2, engager); //ButtonBoard ButtonBoard buttonBoard = ButtonBoard.getButtonBoard(); buttonBoard.enable(); buttonBoard.startPolling(); //Mechcontroller MechController mechController = new MechController(leftPrimary, rightPrimary, secondary, null, shooter, youTiao, climber, belts); addTeleopController(mechController); } /** * Initializer for the 2013 robot. */ private void base2013Init() { GRTLogger.logInfo("Base 2013: GRTFramework starting up."); //Driver station components GRTJoystick primary = new GRTJoystick(1, 12, "primary"); GRTJoystick secondary = new GRTJoystick(2, 12, "secondary"); primary.startPolling(); secondary.startPolling(); primary.enable(); secondary.enable(); GRTLogger.logInfo("Joysticks initialized"); //Battery Sensor GRTBatterySensor batterySensor = new GRTBatterySensor(10, "battery"); batterySensor.startPolling(); batterySensor.enable(); //Shifter solenoids GRTSolenoid leftShifter = new GRTSolenoid((int) GRTConstants.getValue("leftSolenoid")); GRTSolenoid rightShifter = new GRTSolenoid((int) GRTConstants.getValue("rightSolenoid")); //Compressor Compressor compressor = new Compressor((int) GRTConstants.getValue("compressor"), 1); compressor.start(); // PWM outputs Victor leftDT1 = new Victor((int) GRTConstants.getValue("leftDT1")); Victor leftDT2 = new Victor((int) GRTConstants.getValue("leftDT2")); Victor rightDT1 = new Victor((int) GRTConstants.getValue("rightDT1")); Victor rightDT2 = new Victor((int) GRTConstants.getValue("rightDT2")); GRTLogger.logInfo("Motors initialized"); //Add to Test Mode LiveWindow.addActuator("DT", "leftDT1", leftDT1); LiveWindow.addActuator("DT", "leftDT2", leftDT2); LiveWindow.addActuator("DT", "rightDT1", rightDT1); LiveWindow.addActuator("DT", "rightDT2", rightDT2); // Encoders GRTEncoder leftEnc = new GRTEncoder((int) GRTConstants.getValue("encoderLeftA"), (int) GRTConstants.getValue("encoderLeftB"), 1, 50, "leftEnc"); GRTEncoder rightEnc = new GRTEncoder((int) GRTConstants.getValue("encoderRightA"), (int) GRTConstants.getValue("encoderRightB"), 1, 50, "rightEnc"); leftEnc.enable(); rightEnc.enable(); leftEnc.startPolling(); rightEnc.startPolling(); GRTLogger.logInfo("Encoders initialized"); //Mechanisms dt = new GRTDriveTrain(leftDT1, leftDT2, rightDT1, rightDT2, leftShifter, rightShifter, leftEnc, rightEnc); GRTLogger.logInfo("Mechanisms initialized"); //Controllers DriveController dc = new DriveController(dt, primary, secondary); GRTLogger.logInfo("Controllers 
Initialized"); addTeleopController(dc); GRTLogger.logSuccess("Ready to drive."); } /** * Initialize function for the 2012 base. */ private void base2012Init() { GRTLogger.logInfo("2012 Base: GRTFramework starting up."); //Battery Sensor GRTBatterySensor batterySensor = new GRTBatterySensor(10, "battery"); batterySensor.startPolling(); batterySensor.enable(); //Driver station components GRTJoystick joy1 = new GRTJoystick(1, 25, "Joystick"); GRTJoystick joy2 = new GRTJoystick(2, 25, "Joystick"); joy1.startPolling(); joy1.enable(); joy2.startPolling(); joy2.enable(); GRTLogger.logInfo("Joysticks initialized"); // PWM outputs //TODO check motor pins Talon leftDT1 = new Talon((int) GRTConstants.getValue("leftDT1")); Talon leftDT2 = new Talon((int) GRTConstants.getValue("leftDT2")); Talon rightDT1 = new Talon((int) GRTConstants.getValue("rightDT1")); Talon rightDT2 = new Talon((int) GRTConstants.getValue("rightDT2")); GRTLogger.logInfo("Motors initialized"); //Mechanisms dt = new GRTDriveTrain(leftDT1, leftDT2, rightDT1, rightDT2); dt.setScaleFactors(1, -1, -1, 1); DriveController dc = new DriveController(dt, joy1, joy2); addTeleopController(dc); GRTLogger.logInfo("Big G, Litte O"); Timer.delay(.2); GRTLogger.logInfo("Go"); Timer.delay(.4); GRTLogger.logInfo("Go"); Timer.delay(.4); GRTLogger.logInfo("Go!"); } public void test() { while (isTest() && isEnabled()) { LiveWindow.run(); Timer.delay(.1); } } }
package org.broadinstitute.sting.playground.fourbasecaller; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import org.broadinstitute.sting.utils.cmdLine.CommandLineProgram; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.playground.illumina.FirecrestFileParser; import org.broadinstitute.sting.playground.illumina.FirecrestReadData; import org.broadinstitute.sting.playground.illumina.FourIntensity; //import org.broadinstitute.sting.playground.illumina.IlluminaParser; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMFileWriter; import net.sf.samtools.SAMFileWriterFactory; import net.sf.samtools.SAMRecord; import edu.mit.broad.picard.illumina.BustardFileParser; import edu.mit.broad.picard.illumina.BustardReadData; public class FourBaseRecaller extends CommandLineProgram { public static FourBaseRecaller Instance = null; public File DIR; public int LANE; public File OUT; public int END = 0; public int TRAINING_LIMIT = 1000000000; public int CALLING_LIMIT = 1000000000; public Boolean RAW = false; public static void main(String[] argv) { Instance = new FourBaseRecaller(); start(Instance, argv); } protected void setupArgs() { m_parser.addRequiredArg("dir", "D", "Illumina Bustard directory", "DIR"); m_parser.addRequiredArg("lane", "L", "Illumina flowcell lane", "LANE"); m_parser.addRequiredArg("out", "O", "Output path for sam file", "OUT"); m_parser.addOptionalArg("end", "E", "End of read to process (0 = whole read, i.e. unpaired; 1 = first end; 2 = second end)", "END"); m_parser.addOptionalArg("tlim", "T", "Number of reads to use for parameter initialization", "TRAINING_LIMIT"); m_parser.addOptionalArg("clim", "C", "Number of reads to basecall", "CALLING_LIMIT"); m_parser.addOptionalFlag("raw", "R", "Use raw intensities?", "RAW"); } protected int execute() { boolean isPaired = (END > 0); //IlluminaParser ip = new IlluminaParser(DIR, LANE, 0); //System.exit(1); // Set up debugging paths File debugdir = new File(OUT.getPath() + ".debug/"); debugdir.mkdir(); PrintWriter debugout = null; try { debugout = new PrintWriter(debugdir.getPath() + "/debug.out"); } catch (IOException e) { } BustardFileParser bfp; BustardReadData bread; FirecrestFileParser ffp; FirecrestReadData fread; bfp = new BustardFileParser(DIR, LANE, isPaired, "FB"); bread = bfp.next(); ffp = new FirecrestFileParser(DIR.getParentFile(), LANE); fread = ffp.next(); int cycle_offset = (END <= 1) ? 0 : bread.getIntensities().length/2; BasecallingReadModel model = new BasecallingReadModel(bread.getFirstReadSequence().length()); int queryid; // learn mean parameters if (debugout != null) { debugout.println("intensity cycle int_a int_c int_g int_t base"); } queryid = 0; do { String bases = (END <= 1) ? bread.getFirstReadSequence() : bread.getSecondReadSequence(); byte[] quals = (END <= 1) ? bread.getFirstReadPhredBinaryQualities() : bread.getSecondReadPhredBinaryQualities(); double[][] intensities = bread.getIntensities(); double[][] rawintensities = fread.getIntensities(); for (int cycle = 0; cycle < bases.length(); cycle++) { char baseCur = bases.charAt(cycle); byte qualCur = quals[cycle]; double[] fourintensity = (RAW) ? 
rawintensities[cycle + cycle_offset] : intensities[cycle + cycle_offset]; if (debugout != null && cycle >= 31 && cycle <= 33) { debugout.println("intensity " + cycle + " " + fourintensity[0] + " " + fourintensity[1] + " " + fourintensity[2] + " " + fourintensity[3] + " " + baseCur); } model.addMeanPoint(cycle, baseCur, qualCur, fourintensity); } queryid++; } while (queryid < TRAINING_LIMIT && bfp.hasNext() && (bread = bfp.next()) != null && (fread = ffp.next()) != null); debugout.close(); // learn covariance parameters bfp = new BustardFileParser(DIR, LANE, isPaired, "FB"); bread = bfp.next(); ffp = new FirecrestFileParser(DIR.getParentFile(), LANE); fread = ffp.next(); queryid = 0; do { String bases = (END <= 1) ? bread.getFirstReadSequence() : bread.getSecondReadSequence(); byte[] quals = (END <= 1) ? bread.getFirstReadPhredBinaryQualities() : bread.getSecondReadPhredBinaryQualities(); double[][] intensities = bread.getIntensities(); double[][] rawintensities = fread.getIntensities(); for (int cycle = 0; cycle < bases.length(); cycle++) { char baseCur = bases.charAt(cycle); byte qualCur = quals[cycle]; double[] fourintensity = (RAW) ? rawintensities[cycle + cycle_offset] : intensities[cycle + cycle_offset]; model.addCovariancePoint(cycle, baseCur, qualCur, fourintensity); } queryid++; } while (queryid < TRAINING_LIMIT && bfp.hasNext() && (bread = bfp.next()) != null && (fread = ffp.next()) != null); // write debugging info model.write(debugdir); // call bases SAMFileHeader sfh = new SAMFileHeader(); SAMFileWriter sfw = new SAMFileWriterFactory().makeSAMOrBAMWriter(sfh, false, OUT); bfp = new BustardFileParser(DIR, LANE, isPaired, "FB"); bread = bfp.next(); ffp = new FirecrestFileParser(DIR.getParentFile(), LANE); fread = ffp.next(); queryid = 0; do { String bases = (END <= 1) ? bread.getFirstReadSequence() : bread.getSecondReadSequence(); byte[] quals = (END <= 1) ? bread.getFirstReadPhredBinaryQualities() : bread.getSecondReadPhredBinaryQualities(); double[][] intensities = bread.getIntensities(); double[][] rawintensities = fread.getIntensities(); byte[] asciiseq = new byte[bases.length()]; byte[] bestqual = new byte[bases.length()]; byte[] nextbestqual = new byte[bases.length()]; for (int cycle = 0; cycle < bases.length(); cycle++) { double[] fourintensity = (RAW) ? 
rawintensities[cycle + cycle_offset] : intensities[cycle + cycle_offset]; FourProb fp = model.computeProbabilities(cycle, fourintensity); asciiseq[cycle] = (byte) fp.baseAtRank(0); bestqual[cycle] = fp.qualAtRank(0); nextbestqual[cycle] = QualityUtils.baseAndProbToCompressedQuality(fp.indexAtRank(1), fp.probAtRank(1)); } sfw.addAlignment(constructSAMRecord("KIR_", new String(asciiseq), bestqual, nextbestqual, isPaired, END, bread, sfh)); sfw.addAlignment(constructSAMRecord("BUS_", bases, quals, null, isPaired, END, bread, sfh)); queryid++; } while (queryid < CALLING_LIMIT && bfp.hasNext() && (bread = bfp.next()) != null); sfw.close(); return 0; } private SAMRecord constructSAMRecord(String readNamePrefix, String bases, byte[] bestqual, byte[] nextbestqual, boolean isPaired, int END, BustardReadData bread, SAMFileHeader sfh) { SAMRecord sr = new SAMRecord(sfh); sr.setReadName(readNamePrefix + bread.getReadName()); sr.setReadUnmappedFlag(true); sr.setReadString(bases); sr.setBaseQualities(bestqual); if (nextbestqual != null) { sr.setAttribute("SQ", nextbestqual); } sr.setReadFailsVendorQualityCheckFlag(!bread.isPf()); if (isPaired) { sr.setMateUnmappedFlag(true); sr.setFirstOfPairFlag(END <= 1); sr.setSecondOfPairFlag(END > 1); } return sr; } }
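The recaller above delegates the per-cycle decision to BasecallingReadModel; as a rough, self-contained illustration of the idea (not the actual model), the snippet below calls the channel with the highest intensity and converts its share of the total into a Phred-style quality (Q = -10 * log10 of the error probability):

// Illustrative only: naive base call from four channel intensities (A, C, G, T),
// with a Phred-scaled quality derived from the winning channel's share.
public class NaiveBaseCallSketch {
    static final char[] BASES = { 'A', 'C', 'G', 'T' };

    public static void main(String[] args) {
        double[] intensity = { 12.0, 310.0, 25.0, 8.0 }; // made-up numbers
        int best = 0;
        double sum = 0.0;
        for (int i = 0; i < intensity.length; i++) {
            sum += intensity[i];
            if (intensity[i] > intensity[best]) {
                best = i;
            }
        }
        double p = intensity[best] / sum;                // crude call "probability"
        int qual = (int) Math.round(-10.0 * Math.log10(1.0 - p));
        System.out.println(BASES[best] + " Q" + qual);   // C Q9 for these numbers
    }
}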